~launchpad-pqm/launchpad/devel

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
# Copyright 2010-2011 Canonical Ltd.  This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

"""Tests for ApportJobs."""

__metaclass__ = type

import os

from sqlobject import SQLObjectNotFound
import transaction
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy

from lp.services.config import config
from lp.services.webapp.interfaces import ILaunchpadRoot
from lp.testing.layers import (
    LaunchpadFunctionalLayer,
    LaunchpadZopelessLayer,
    )
from lp.bugs.interfaces.apportjob import (
    ApportJobType,
    IApportJob,
    IProcessApportBlobJob,
    IProcessApportBlobJobSource,
    )
from lp.bugs.model.apportjob import (
    ApportJob,
    ApportJobDerived,
    )
from lp.bugs.utilities.filebugdataparser import (
    FileBugData,
    FileBugDataParser,
    )
from lp.services.job.interfaces.job import JobStatus
from lp.services.librarian.interfaces import ILibraryFileAliasSet
from lp.services.scripts.tests import run_script
from lp.services.temporaryblobstorage.interfaces import (
    ITemporaryStorageManager,
    )
from lp.testing import (
    login_person,
    TestCaseWithFactory,
    )
from lp.testing.views import create_initialized_view


class ApportJobTestCase(TestCaseWithFactory):
    """Test case for basic ApportJob gubbins."""

    layer = LaunchpadZopelessLayer

    def test_instantiate(self):
        # ApportJob.__init__() builds a working ApportJob from a blob,
        # a job type and a metadata payload.
        blob = self.factory.makeBlob()
        raw_metadata = ('some', 'arbitrary', 'metadata')

        job = ApportJob(blob, ApportJobType.PROCESS_BLOB, raw_metadata)

        self.assertEqual(blob, job.blob)
        self.assertEqual(ApportJobType.PROCESS_BLOB, job.job_type)

        # The metadata is stored serialized as JSON; reading it back
        # deserializes it, so the round-tripped value is a list of
        # unicode strings rather than the tuple we passed in.
        self.assertEqual([u'some', u'arbitrary', u'metadata'], job.metadata)
        self.assertProvides(job, IApportJob)


class ApportJobDerivedTestCase(TestCaseWithFactory):
    """Test case for the ApportJobDerived class."""

    layer = LaunchpadZopelessLayer

    def test_create_explodes(self):
        # ApportJobDerived is an abstract base: calling create() on it
        # directly raises AttributeError, because only subclasses supply
        # the pieces it needs.
        dummy_blob = self.factory.makeBlob()
        self.assertRaises(AttributeError, ApportJobDerived.create, dummy_blob)


class ProcessApportBlobJobTestCase(TestCaseWithFactory):
    """Test case for the ProcessApportBlobJob class."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(ProcessApportBlobJobTestCase, self).setUp()

        # Create a BLOB using existing testing data.  Close the file
        # handle once the data is read so we don't leak it for the
        # duration of the test.
        testfiles = os.path.join(config.root, 'lib/lp/bugs/tests/testfiles')
        blob_file = open(
            os.path.join(testfiles, 'extra_filebug_data.msg'))
        try:
            blob_data = blob_file.read()
        finally:
            blob_file.close()

        self.blob = self.factory.makeBlob(blob_data)
        transaction.commit()  # We need the blob available from the Librarian.

    def _assertFileBugDataMatchesDict(self, filebug_data, data_dict):
        """Assert that the data in a FileBugData object matches a dict."""
        self.assertEqual(
            filebug_data.initial_summary, data_dict['initial_summary'],
            "Initial summaries do not match")
        self.assertEqual(
            filebug_data.initial_tags, data_dict['initial_tags'],
            "Values for initial_tags do not match")
        self.assertEqual(
            filebug_data.private, data_dict['private'],
            "Values for private do not match")
        self.assertEqual(
            filebug_data.subscribers, data_dict['subscribers'],
            "Values for subscribers do not match")
        self.assertEqual(
            filebug_data.extra_description,
            data_dict['extra_description'],
            "Values for extra_description do not match")
        self.assertEqual(
            filebug_data.comments, data_dict['comments'],
            "Values for comments do not match")
        self.assertEqual(
            filebug_data.hwdb_submission_keys,
            data_dict['hwdb_submission_keys'],
            "Values for hwdb_submission_keys do not match")

        # The attachments list of the data_dict dict will be of
        # the same length as the attachments list in the filebug_data
        # object.
        self.assertEqual(
            len(filebug_data.attachments),
            len(data_dict['attachments']),
            "Lengths of attachment lists do not match.")

        # The attachments list of the data_dict dict is a list of dicts
        # containing data about the attachments to add to the bug once
        # it has been filed.  Pair the two lists by position rather than
        # by list.index(), which is both O(n) per item and incorrect if
        # two attachment dicts happen to compare equal.
        for index, attachment_dict in enumerate(data_dict['attachments']):
            file_alias_id = attachment_dict['file_alias_id']
            file_alias = getUtility(ILibraryFileAliasSet)[file_alias_id]
            attachment = filebug_data.attachments[index]

            if attachment.get('content', None) is not None:
                # If the FileBugData is coming from the parser directly,
                # the attachments won't have been processed, so we check
                # the unprocessed data against what the
                # ProcessApportBlobJob has stored in the librarian.
                file_content = attachment['content'].read()
                librarian_file_content = file_alias.read()
                self.assertEqual(
                    file_content, librarian_file_content,
                    "File content values do not match for attachment %s and "
                    "LibrarianFileAlias %s" % (
                        attachment['filename'], file_alias.filename))
                self.assertEqual(
                    attachment['filename'], file_alias.filename,
                    "Filenames do not match for attachment %s and "
                    "LibrarianFileAlias %s" % (
                        attachment['filename'], file_alias.id))
                self.assertEqual(
                    attachment['content_type'], file_alias.mimetype,
                    "Content types do not match for attachment %s and "
                    "LibrarianFileAlias %s" % (
                        attachment['filename'], file_alias.id))

            if attachment.get('file_alias', None) is not None:
                # If the attachment has a file_alias item, it will contain
                # the LibrarianFileAlias referenced by the attachment's
                # file_alias_id.
                self.assertEqual(
                    file_alias,
                    attachment['file_alias'],
                    "The attachment's file alias doesn't match it's "
                    "file_alias_id")

    def test_interface(self):
        # ProcessApportBlobJob instances provide IProcessApportBlobJob.
        job = getUtility(IProcessApportBlobJobSource).create(self.blob)
        self.assertProvides(job, IProcessApportBlobJob)

    def test_run(self):
        # IProcessApportBlobJobSource.run() extracts salient data from an
        # Apport BLOB and stores it in the job's metadata attribute.
        job = getUtility(IProcessApportBlobJobSource).create(self.blob)
        job.run()
        transaction.commit()

        # Once the job has been run, its metadata will contain a dict
        # called processed_data, which will contain the data parsed from
        # the BLOB.
        processed_data = job.metadata.get('processed_data', None)
        self.assertNotEqual(
            None, processed_data,
            "processed_data should not be None after the job has run.")

        # The items in the processed_data dict represent the salient
        # information parsed out of the BLOB. We can use our
        # FileBugDataParser to check that the items recorded in the
        # processed_data dict are correct.
        self.blob.file_alias.open()
        data_parser = FileBugDataParser(self.blob.file_alias)
        filebug_data = data_parser.parse()
        self._assertFileBugDataMatchesDict(filebug_data, processed_data)

    def test_getByBlobUUID(self):
        # IProcessApportBlobJobSource.getByBlobUUID takes a BLOB UUID as a
        # parameter and returns any jobs for that BLOB.
        uuid = self.blob.uuid

        job = getUtility(IProcessApportBlobJobSource).create(self.blob)
        job_from_uuid = getUtility(
            IProcessApportBlobJobSource).getByBlobUUID(uuid)
        self.assertEqual(
            job, job_from_uuid,
            "Job returned by getByBlobUUID() did not match original job.")
        self.assertEqual(
            self.blob, job_from_uuid.blob,
            "BLOB referenced by Job returned by getByBlobUUID() did not "
            "match original BLOB.")

        # If the UUID doesn't exist, getByBlobUUID() will raise a
        # SQLObjectNotFound error.
        self.assertRaises(
            SQLObjectNotFound,
            getUtility(IProcessApportBlobJobSource).getByBlobUUID, 'foobar')

    def test_create_job_creates_only_one(self):
        # IProcessApportBlobJobSource.create() will create only one
        # ProcessApportBlobJob for a given BLOB, no matter how many
        # times it is called.
        blobjobsource = getUtility(IProcessApportBlobJobSource)
        current_jobs = list(blobjobsource.iterReady())
        self.assertEqual(
            0, len(current_jobs),
            "There should be no ProcessApportBlobJobs. Found %s" %
            len(current_jobs))

        job = blobjobsource.create(self.blob)
        current_jobs = list(blobjobsource.iterReady())
        self.assertEqual(
            1, len(current_jobs),
            "There should be only one ProcessApportBlobJob. Found %s" %
            len(current_jobs))

        blobjobsource.create(self.blob)  # Another job.
        current_jobs = list(blobjobsource.iterReady())
        self.assertEqual(
            1, len(current_jobs),
            "There should be only one ProcessApportBlobJob. Found %s" %
            len(current_jobs))

        # If the job is complete, it will no longer show up in the list
        # of ready jobs. However, it won't be possible to create a new
        # job to process the BLOB because each BLOB can only have one
        # IProcessApportBlobJobSource.
        job.job.start()
        job.job.complete()
        current_jobs = list(blobjobsource.iterReady())
        self.assertEqual(
            0, len(current_jobs),
            "There should be no ready ProcessApportBlobJobs. Found %s" %
            len(current_jobs))

        yet_another_job = blobjobsource.create(self.blob)
        current_jobs = list(blobjobsource.iterReady())
        self.assertEqual(
            0, len(current_jobs),
            "There should be no new ProcessApportBlobJobs. Found %s" %
            len(current_jobs))

        # In fact, yet_another_job will be the same job as before, since
        # it's attached to the same BLOB.
        self.assertEqual(job.id, yet_another_job.id, "Jobs do not match.")

    def test_cronscript_succeeds(self):
        # The process-apport-blobs cronscript will run all pending
        # ProcessApportBlobJobs.
        getUtility(IProcessApportBlobJobSource).create(self.blob)
        transaction.commit()

        retcode, stdout, stderr = run_script(
            'cronscripts/process-apport-blobs.py', [],
            expect_returncode=0)
        self.assertEqual('', stdout)
        self.assertIn(
            'INFO    Ran 1 ProcessApportBlobJob jobs.\n', stderr)

    def test_getFileBugData(self):
        # The IProcessApportBlobJobSource.getFileBugData() method
        # returns the +filebug data parsed from the blob as a
        # FileBugData object.
        job = getUtility(IProcessApportBlobJobSource).create(self.blob)
        job.run()
        transaction.commit()

        # Rather irritatingly, the filebug_data object is wrapped in a
        # security proxy, so we remove it for the purposes of this
        # comparison.
        filebug_data = job.getFileBugData()
        self.assertTrue(
            isinstance(removeSecurityProxy(filebug_data), FileBugData),
            "job.getFileBugData() should return a FileBugData instance.")

        # The attributes of the FileBugData match the data stored in the
        # processed_data dict.
        processed_data = job.metadata.get('processed_data', None)
        self._assertFileBugDataMatchesDict(filebug_data, processed_data)


class TestTemporaryBlobStorageAddView(TestCaseWithFactory):
    """Test case for the TemporaryBlobStorageAddView."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestTemporaryBlobStorageAddView, self).setUp()

        # Create a BLOB using existing testing data.
        testfiles = os.path.join(config.root, 'lib/lp/bugs/tests/testfiles')
        blob_file = open(
            os.path.join(testfiles, 'extra_filebug_data.msg'))
        self.blob_data = blob_file.read()
        blob_file.close()

        # The view tests need a logged-in user and a product to file
        # bugs against.
        person = self.factory.makePerson()
        self.product = self.factory.makeProduct()
        login_person(person)

    def _create_blob_and_job_using_storeblob(self):
        """Helper method to create a BLOB and ProcessApportBlobJob."""
        view = create_initialized_view(
            getUtility(ILaunchpadRoot), '+storeblob')

        # The view's store_blob method stores the blob in the database
        # and returns its UUID.
        blob_uuid = view.store_blob(self.blob_data)
        transaction.commit()

        return blob_uuid

    def _create_and_traverse_filebug_view(self, blob_uuid):
        """Create a +filebug view for a given blob id and return it."""
        view = create_initialized_view(
            self.product, '+filebug', path_info='/%s' % blob_uuid)

        # We need to call publishTraverse() on the view to ensure that
        # the extra_data_token attribute gets populated.
        view.publishTraverse(view.request, blob_uuid)
        return view

    def test_blob_has_been_processed(self):
        # Using the TemporaryBlobStorageAddView to upload a new BLOB
        # will not show the blob as processed, since its job has not
        # been run yet.
        blob_uuid = self._create_blob_and_job_using_storeblob()
        blob = getUtility(ITemporaryStorageManager).fetch(blob_uuid)

        self.assertFalse(
            blob.hasBeenProcessed(),
            "BLOB should not be processed, but indicates it has.")

    def test_blob_get_processed_data(self):
        # Using the TemporaryBlobStorageAddView to upload a new BLOB
        # should indicate that two attachments were processed.
        blob_uuid = self._create_blob_and_job_using_storeblob()
        blob = getUtility(ITemporaryStorageManager).fetch(blob_uuid)
        job = getUtility(IProcessApportBlobJobSource).getByBlobUUID(blob_uuid)
        job.job.start()
        job.job.complete()
        job.run()
        blob_meta = blob.getProcessedData()

        self.assertEqual(
            len(blob_meta['attachments']), 2,
            "BLOB metadata: %s" % str(blob_meta))

    def test_adding_blob_adds_job(self):
        # Using the TemporaryBlobStorageAddView to upload a new BLOB
        # will add a new ProcessApportBlobJob for that BLOB.
        blob_uuid = self._create_blob_and_job_using_storeblob()
        blob = getUtility(ITemporaryStorageManager).fetch(blob_uuid)
        job = getUtility(IProcessApportBlobJobSource).getByBlobUUID(blob_uuid)

        self.assertEqual(
            blob, job.blob,
            "BLOB attached to Job returned by getByBlobUUID() did not match "
            "expected BLOB.")

    def test_filebug_extra_data_processing_job(self):
        # The +filebug view can retrieve the ProcessApportBlobJob for a
        # given BLOB UUID. This is available via its
        # extra_data_processing_job property.
        blob_uuid = self._create_blob_and_job_using_storeblob()
        view = self._create_and_traverse_filebug_view(blob_uuid)

        job = getUtility(IProcessApportBlobJobSource).getByBlobUUID(blob_uuid)
        job_from_view = view.extra_data_processing_job
        self.assertEqual(job, job_from_view, "Jobs didn't match.")

        # If no UUID is passed to +filebug, its
        # extra_data_processing_job property will return None.
        view = create_initialized_view(self.product, '+filebug')
        job_from_view = view.extra_data_processing_job
        self.assertEqual(
            None, job_from_view,
            "Job returned by extra_data_processing_job should be None.")

    def test_filebug_extra_data_to_process(self):
        # The +filebug view has a property, extra_data_to_process, which
        # indicates whether or not an Apport blob has been processed.
        blob_uuid = self._create_blob_and_job_using_storeblob()
        view = self._create_and_traverse_filebug_view(blob_uuid)

        job_from_view = view.extra_data_processing_job

        # Because the job hasn't yet been run the view's extra_data_to_process
        # property will return True.
        self.assertEqual(
            JobStatus.WAITING, job_from_view.job.status,
            "Job should be WAITING, is in fact %s" %
            job_from_view.job.status.title)
        self.assertTrue(
            view.extra_data_to_process,
            "view.extra_data_to_process should be True while job is WAITING.")

        # If the job is started but hasn't completed,
        # extra_data_to_process will remain True.
        job_from_view.job.start()
        self.assertEqual(
            JobStatus.RUNNING, job_from_view.job.status,
            "Job should be RUNNING, is in fact %s" %
            job_from_view.job.status.title)
        self.assertTrue(
            view.extra_data_to_process,
            "view.extra_data_to_process should be True while job is RUNNING.")

        # Once the job is complete, extra_data_to_process will be False.
        job_from_view.job.complete()
        self.assertEqual(
            JobStatus.COMPLETED, job_from_view.job.status,
            "Job should be COMPLETED, is in fact %s" %
            job_from_view.job.status.title)
        self.assertFalse(
            view.extra_data_to_process,
            "view.extra_data_to_process should be False when job is "
            "COMPLETED.")

        # If there's no job - for example if someone visits the +filebug
        # page normally - extra_data_to_process will always be
        # False.
        view = create_initialized_view(self.product, '+filebug')
        self.assertEqual(
            None, view.extra_data_processing_job,
            "extra_data_processing_job should be None when there's no job "
            "for a view.")
        self.assertFalse(
            view.extra_data_to_process,
            "view.extra_data_to_process should be False when there is no "
            "job.")