librarian are running and making sure that the key is attached to the
relevant launchpad person.

>>> from lp.testing.keyserver import KeyServerTac
>>> keyserver = KeyServerTac()

Import public keyring into current LPDB.

>>> from canonical.launchpad.ftests import import_public_test_keys
>>> import_public_test_keys()

Having set up that infrastructure, we need to prepare a breezy distroseries
for the ubuntutest distribution.

>>> from lp.registry.interfaces.pocket import PackagePublishingPocket
>>> from lp.registry.model.distribution import Distribution
>>> from lp.soyuz.enums import PackageUploadStatus
>>> from lp.soyuz.scripts.initialize_distroseries import (
...     InitializeDistroSeries)
>>> from canonical.launchpad.database.librarian import LibraryFileAlias
>>> ubuntu = Distribution.byName('ubuntu')
>>> breezy_autotest = ubuntu['breezy-autotest']
>>> ubuntutest = Distribution.byName('ubuntutest')
>>> breezy = ubuntutest.newSeries(
...     'breezy', 'Breezy Badger', 'The Breezy Badger',
...     'Black and White', 'Someone', '5.10', None,
...     breezy_autotest.owner)
>>> ids = InitializeDistroSeries(breezy, [breezy_autotest.id])
>>> ids.initialize()
>>> breezy.changeslist = 'breezy-changes@ubuntu.com'
>>> fake_chroot = LibraryFileAlias.get(1)
>>> unused = breezy['i386'].addOrUpdateChroot(fake_chroot)

Add disk content for a file inherited from ubuntu/breezy-autotest:

>>> from canonical.librarian.testing.server import fillLibrarianFile
>>> fillLibrarianFile(54)

Now that the infrastructure is ready, we prepare a set of useful methods.

Firstly, we need a way to copy a test upload into the queue (but skip
lock files, which have names starting with a dot).

>>> from lp.archiveuploader.tests import datadir
>>> def punt_upload_into_queue(leaf, distro):
...     inc_dir = os.path.join(incoming_dir, leaf, distro)
...     os.makedirs(inc_dir)
...     for file_leaf in os.listdir(datadir(os.path.join("suite", leaf))):
...         os.system("cp %s %s" % (
...             datadir(os.path.join("suite", leaf, file_leaf)), inc_dir))

We need a way to count the items in a queue directory.

>>> def count_items(queue):
...     return len(queue)

And then we need a way to process the uploads from the queue.

>>> from canonical.config import config
>>> from lp.services.log.logger import FakeLogger
>>> from lp.soyuz.scripts.soyuz_process_upload import (
...     ProcessUpload)
>>> from canonical.testing.layers import LaunchpadZopelessLayer
>>> def process_uploads(upload_policy, series, loglevel):
...     """Simulate process-upload.py script run.
...
...     :param upload_policy: context in which to consider the upload
...         (equivalent to script's --context option).
...     :param series: distro series to give back from
...         (equivalent to script's --series option).
...     :param loglevel: logging level (as defined in logging module).
...         Any log messages below this level will be suppressed.
...     """
...     args = [temp_dir, "-C", upload_policy]
...     if series is not None:
...         args.extend(["-s", series])
...     # Run script under 'uploader' DB user. The dbuser argument to the
...     # script constructor is ignored, so we must change DB users here.
...     LaunchpadZopelessLayer.txn.commit()
...     LaunchpadZopelessLayer.switchDbUser(config.uploader.dbuser)
...     process = ProcessUpload(
...         'process-upload', dbuser='ignored', test_args=args)
...     process.logger = FakeLogger()
...     if loglevel is not None:
...         process.logger.setLevel(loglevel)
...     process.txn = LaunchpadZopelessLayer.txn
...     process.main()
...     LaunchpadZopelessLayer.switchDbUser('launchpad')

And we need a way to process the accepted queue.

>>> from zope.component import getUtility
>>> from canonical.launchpad.ftests import (
...     login,
...     syncUpdate,
...     )
>>> from lp.registry.interfaces.distribution import IDistributionSet
>>> login("foo.bar@canonical.com")

>>> def process_accepted(distro):
...     distribution = getUtility(IDistributionSet)[distro]
...     for series in distribution.series:
...         items = series.getQueueItems(
...             status=PackageUploadStatus.ACCEPTED)
...         for item in items:
...             item.realiseUpload()

If an upload of ours ends up in the NEW queue, we need a way to process
it into the accepted queue.

>>> def process_new(distro, series):
...     distribution = getUtility(IDistributionSet)[distro]
...     if series is None:
...         series = "breezy"
...     dr, pocket = distribution.getDistroSeriesAndPocket(series)
...     items = dr.getQueueItems(status=PackageUploadStatus.NEW)
...     for item in items:
...         item.setAccepted()
...     items = dr.getQueueItems(status=PackageUploadStatus.UNAPPROVED)
...     for item in items:
...         item.setAccepted()

Finally, as a very simplistic publishing process, we may need to punt any
given upload into the published state, so here's a very simplistic publisher.

>>> from lp.soyuz.model.publishing import (
...     SourcePackagePublishingHistory as SPPH,
...     BinaryPackagePublishingHistory as BPPH)
>>> from lp.soyuz.enums import PackagePublishingStatus as PPS
>>> from canonical.database.constants import UTC_NOW
>>> def simple_publish(distro):
...     srcs_to_publish = SPPH.select("""
...         SourcePackagePublishingHistory.distroseries = DistroSeries.id
...         AND DistroSeries.distribution = Distribution.id
...         AND Distribution.name = '%s'
...         AND SourcePackagePublishingHistory.status = 1""" % distro,
...         clauseTables=['DistroSeries', 'Distribution'])
...     bins_to_publish = BPPH.select("""
...         BinaryPackagePublishingHistory.distroarchseries =
...             DistroArchSeries.id
...         AND DistroArchSeries.distroseries = DistroSeries.id
...         AND DistroSeries.distribution = Distribution.id
...         AND Distribution.name = '%s'
...         AND BinaryPackagePublishingHistory.status = 1""" % distro,
...         clauseTables=['DistroArchSeries', 'DistroSeries',
...                       'Distribution'])
...     published_one = False
...     for src in srcs_to_publish:
...         src.status = PPS.PUBLISHED
...         src.datepublished = UTC_NOW
...         published_one = True
...     for bin in bins_to_publish:
...         bin.status = PPS.PUBLISHED
...         bin.datepublished = UTC_NOW
...         published_one = True
...     return published_one

We'll be doing a lot of uploads with sanity checks, and expect them to
succeed. A helper function, simulate_upload, does that with all the
checking.

>>> from lp.services.mail import stub

>>> def simulate_upload(
...     leafname, is_new=False, upload_policy='anything',
...     series=None, distro="ubuntutest", loglevel=logging.WARN):
...     """Process upload(s). Options are as for process_uploads()."""
...     punt_upload_into_queue(leafname, distro=distro)
...     process_uploads(upload_policy, series, loglevel)
...     # We seem to be leaving a lock file behind here for some reason.
...     # Naturally it doesn't count as an unprocessed incoming file,
...     # which is what we're really looking for.
...     lockfile = os.path.join(incoming_dir, '.lock')
...     if os.access(lockfile, os.F_OK):
...         os.remove(lockfile)
...     assert len(os.listdir(incoming_dir)) == 0, (
...         "Incoming should be empty: %s" % os.listdir(incoming_dir))
...
...     rejected_contents = os.listdir(rejected_dir)
...     if len(rejected_contents) > 0:
...         # Clean up rejected entry
...         shutil.rmtree(os.path.join(rejected_dir, leafname))
...         print "Rejected uploads: %s" % rejected_contents
...         return
...
...     assert len(os.listdir(failed_dir)) == 0, (
...         "Failed upload(s): %s" % os.listdir(failed_dir))
...
...     process_new(distro=distro, series=series)
...     process_accepted(distro=distro)
...     assert simple_publish(distro=distro), (
...         "Should publish at least one item")
...     if loglevel is None or loglevel <= logging.INFO:
...         print "Upload complete."

>>> from lp.testing.mail_helpers import pop_notifications
>>> def read_email():
...     """Pop all emails from the test mailbox, and summarize them.
...
...     For each message, prints "To:" followed by recipients; "Subject:"
...     followed by subject line; and message body followed by a blank line.
...     """
...     for message in pop_notifications(commit=False):
...         print "To:", message['to']
...         print "Subject:", message['subject']
...         print message.get_payload()[0].as_string()

The 'bar' package is an arch-all package. We have four stages to the
bar test. Each stage should be simple enough. First we have a new
overridable binary. This tests the simple overriding of both sources
and arch-independent binaries.

>>> simulate_upload('bar_1.0-1', is_new=True, loglevel=logging.INFO)
INFO Processing upload
...

>>> simulate_upload('bar_1.0-1_binary', is_new=True)

>>> simulate_upload('bar_1.0-2')

>>> simulate_upload('bar_1.0-2_binary')

Check the rejection of a malicious version of the bar package, which refers
to a different 'bar_1.0.orig.tar.gz'.

>>> stub.test_emails = []
>>> simulate_upload('bar_1.0-3', loglevel=logging.ERROR)
Rejected uploads: ['bar_1.0-3']

>>> read_email()
To: Foo Bar <foo.bar@canonical.com>,
Daniel Silverstone <daniel.silverstone@canonical.com>
Subject: bar_1.0-3_source.changes rejected
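
What makes this upload malicious is that it reuses the name
'bar_1.0.orig.tar.gz' for a file whose contents, and therefore checksum,
differ from the orig tarball the archive already knows about. A minimal
sketch of that kind of conflict check, independent of the Launchpad code
(the file contents here are made up):

>>> import hashlib
>>> known_md5 = hashlib.md5("original orig.tar.gz contents").hexdigest()
>>> uploaded_md5 = hashlib.md5("tampered orig.tar.gz contents").hexdigest()
>>> uploaded_md5 == known_md5
False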

Force weird behavior with RFC 2047 sentences containing '.' on
bar_1.0-4, which caused bug #41102.

>>> from lp.registry.interfaces.person import IPersonSet
>>> name16 = getUtility(IPersonSet).getByName('name16')
>>> name16.displayname = "Foo B. Bar"
>>> syncUpdate(name16)

Check that the email recipient for a displayname containing special
chars ('.') is RFC 2047 compliant:

>>> simulate_upload('bar_1.0-4')
>>> uninteresting_email = stub.test_emails.pop()
>>> read_email()
To: "Foo B. Bar" <foo.bar@canonical.com>,
Celso Providelo <celso.providelo@canonical.com>
Subject: [ubuntutest/breezy] bar 1.0-4 (Accepted)
Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: quoted-printable
bar (1.0-4) breezy; urgency=3Dlow
* Changer using non-preferred email
Date: Tue, 25 Apr 2006 10:36:14 -0300
Changed-By: Celso R. Providelo <cprov@ubuntu.com>
Maintainer: Launchpad team <launchpad@lists.canonical.com>
Signed-By: "Foo B. Bar" <foo.bar@canonical.com>
http://launchpad.dev/ubuntutest/breezy/+source/bar/1.0-4
Announcing to breezy-changes@ubuntu.com
Thank you for your contribution to Ubuntu Test.
You are receiving this email because you are the uploader, maintainer or
signer of the above package.

>>> name16.displayname = "Foo Bar"
>>> syncUpdate(name16)
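
For reference, a display name containing a '.' has to be quoted when it is
placed in an address header, which is why the recipient above appears as
"Foo B. Bar". A small illustration using only the standard library (an
aside, not part of the upload machinery):

>>> from email.utils import formataddr
>>> formataddr(("Foo B. Bar", "foo.bar@canonical.com"))
'"Foo B. Bar" <foo.bar@canonical.com>'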

Check that we forcibly add the changer as a recipient for "sync" uploads,
which contain an unsigned changesfile. Ensure it sends email to the
changer.

>>> stub.test_emails = []

>>> simulate_upload('bar_1.0-5', upload_policy='sync')
>>> read_email()
To: Celso Providelo <celso.providelo@canonical.com>
Subject: [ubuntutest/breezy] bar 1.0-5 (Accepted)

Add a new version of the bar source package, rename its binary package to
'bar-bin', upload the binary, and check that no spurious SourcePackageName
was created from the binary package name.

>>> simulate_upload('bar_1.0-6', upload_policy='sync')
>>> simulate_upload('bar_1.0-6_binary', is_new=True)

>>> from lp.registry.interfaces.sourcepackagename import (
...     ISourcePackageNameSet)
>>> spn_set = getUtility(ISourcePackageNameSet)
>>> assert spn_set.queryByName('bar-bin') is None


Source Uploads using epochs
---------------------------

As described in Debian Policy
(http://www.debian.org/doc/debian-policy/ch-controlfields.html), a package
version may carry an epoch: the version field has the form
[epoch:]upstream_version[-debian_revision], and a version with a higher
epoch always sorts above one with a lower (or no) epoch.
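
A minimal illustration of the syntax (an aside, not part of the test
machinery): when an epoch is present, it is everything before the first
colon.

>>> version = "1:1.0-1"
>>> epoch, _, rest = version.partition(":")
>>> epoch, rest
('1', '1.0-1')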

To be allowed a security upload, we need to use a released distroseries,
so the following uploads go to the warty series of the ubuntu distribution.

>>> from lp.soyuz.model.section import (
...     Section, SectionSelection)
>>> warty = ubuntu['warty']
>>> devel = Section.selectOneBy(name="devel")
>>> ss = SectionSelection(distroseries=warty, section=devel)

>>> simulate_upload(
...     'baz_1.0-1', is_new=True, upload_policy="anything",
...     series="warty-security", distro="ubuntu")

Check there's a SourcePackageRelease with no build.

>>> from lp.buildmaster.enums import BuildStatus
>>> from lp.registry.model.sourcepackagename import SourcePackageName
>>> from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
>>> from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
>>> spn = SourcePackageName.selectOneBy(name="baz")
>>> spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id)
>>> spr_id = spr.id
>>> builds = BinaryPackageBuild.selectBy(source_package_release_id=spr_id)
>>> len(list(builds))
0

Manually create a build for this spr in i386. This simulates the
buildd-queuebuilder having run in between the source and binary uploads.

>>> warty_i386 = ubuntu['warty']['i386']
>>> main_archive = ubuntu.main_archive
>>> build = spr.createBuild(warty_i386, PackagePublishingPocket.SECURITY,
...     main_archive, status=BuildStatus.NEEDSBUILD)

Check that the build was created.

>>> len(list(BinaryPackageBuild.selectBy(
...     source_package_release_id=spr_id)))
1
638
Upload the i386 binary:
627
... 'baz_1.0-1_single_binary', is_new=True, upload_policy="anything",
628
... distro="ubuntu", series="warty-security")
641
... 'baz_1.0-1_single_binary', is_new=True, upload_policy="anything",
642
... distro="ubuntu", series="warty-security")

Should still just have one build, and it should now be FULLYBUILT.

>>> from canonical.database.sqlbase import clear_current_connection_cache
>>> clear_current_connection_cache()

>>> builds = list(BinaryPackageBuild.selectBy(
...     source_package_release_id=spr_id))
>>> len(builds)
1
>>> builds[0].status == BuildStatus.FULLYBUILT
True

Regression test for bug 54039. Currently it must be here; see bug 54158.

First, a couple of helpers.

>>> from canonical.launchpad.ftests.script import run_script

>>> def run_publish_distro(careful=False, careful_publishing=False):
...     """Run publish-distro on ubuntutest with given extra args.
...
...     :param careful: turns on all "careful" options to the
...         publish-distro script. Equivalent to the script's --careful
...         option.
...     :param careful_publishing: passes the --careful-publishing option
...         to the publish-distro script.
...     """
...     args = ["-v", "-d", "ubuntutest"]
...     if careful:
...         args.append("-C")
...     if careful_publishing:
...         args.append("-P")
...     script = os.path.join(config.root, "scripts", "publish-distro.py")
...     result, stdout, stderr = run_script(script, args)
...     print stderr
...     if result != 0:
...         print "Script returned", result

>>> def release_file_has_uncompressed_packages(path):
...     """Return whether the release file includes uncompressed Packages."""
...     release_file = open(path)
...     release_contents = release_file.read()
...     release_file.close()
...     target_string = "Packages\n"
...     return release_contents.find(target_string) != -1

First publish the distro carefully, to get everything in place.
Before this can happen we need to set up some dummy librarian files for
files that are published in the sample data.

>>> fillLibrarianFile(66)
>>> fillLibrarianFile(67)
>>> fillLibrarianFile(68)
>>> fillLibrarianFile(70)

>>> run_publish_distro(careful=True)
DEBUG Initializing zopeless.
DEBUG Distribution: ubuntutest
...
DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-2_i386.deb from library
DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-1_i386.deb from library
...

Delete the uncompressed Packages and Sources files from the archive folder.
This simulates what cron.daily does between publishing runs.

>>> os.system('find /var/tmp/archive/ubuntutest \\( -name "Packages" '
...     '-o -name "Sources" \\) -exec rm "{}" \\;')
0

Record the timestamp of a release file we expect to be rewritten, which
we'll need later.

>>> release_timestamp = os.stat('/var/tmp/archive/ubuntutest/dists/'
...     'breezy/Release')[stat.ST_MTIME]

Re-publish the distribution, with careful publishing only. This will mean
only pockets into which we've done some publication will have apt-ftparchive