# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__all__ = [
'GLOBAL_PUBLISHER_LOCK',
'Publisher',
'getPublisher',
]
__metaclass__ = type
from datetime import datetime
import errno
import hashlib
import logging
import os
import shutil
from debian.deb822 import (
_multivalued,
Release,
)
from lp.archivepublisher import HARDCODED_COMPONENT_ORDER
from lp.archivepublisher.config import getPubConfig
from lp.archivepublisher.diskpool import DiskPool
from lp.archivepublisher.domination import Dominator
from lp.archivepublisher.htaccess import (
htpasswd_credentials_for_archive,
write_htaccess,
write_htpasswd,
)
from lp.archivepublisher.interfaces.archivesigningkey import (
IArchiveSigningKey,
)
from lp.archivepublisher.model.ftparchive import FTPArchiveHandler
from lp.archivepublisher.utils import (
get_ppa_reference,
RepositoryIndexFile,
)
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.registry.interfaces.series import SeriesStatus
from lp.services.database.sqlbase import sqlvalues
from lp.services.librarian.client import LibrarianClient
from lp.services.utils import file_exists
from lp.soyuz.enums import (
ArchivePurpose,
ArchiveStatus,
BinaryPackageFormat,
PackagePublishingStatus,
)
# Use this as the lock file name for all scripts that may manipulate
# archives in the filesystem. In a Launchpad(Cron)Script, set
# lockfilename to this value to make it use the shared lock.
GLOBAL_PUBLISHER_LOCK = 'launchpad-publisher.lock'
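# A minimal sketch of the intended use (the script class below is
# hypothetical, and the LaunchpadCronScript import path is assumed from
# the lp tree layout of this era):
#
#     from lp.services.scripts.base import LaunchpadCronScript
#
#     class PublishArchivesScript(LaunchpadCronScript):
#         # Setting lockfilename makes the script take the shared lock.
#         lockfilename = GLOBAL_PUBLISHER_LOCK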
def reorder_components(components):
"""Return a list of the components provided.
    The list will be ordered by the semi-arbitrary rules of Ubuntu.
Over time this method needs to be removed and replaced by having
component ordering codified in the database.
"""
remaining = list(components)
ordered = []
for comp in HARDCODED_COMPONENT_ORDER:
if comp in remaining:
ordered.append(comp)
remaining.remove(comp)
ordered.extend(remaining)
return ordered
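# A worked example, assuming HARDCODED_COMPONENT_ORDER starts with 'main',
# 'restricted', 'universe', 'multiverse' (the Ubuntu ordering):
#
#     reorder_components(['universe', 'partner', 'main'])
#     # => ['main', 'universe', 'partner']
#
# Known components come first, in the hardcoded order; unknown components
# retain their original relative order at the end.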
def get_suffixed_indices(path):
"""Return a set of paths to compressed copies of the given index."""
return set([path + suffix for suffix in ('', '.gz', '.bz2')])
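# For example, the uncompressed path is returned alongside its compressed
# variants:
#
#     get_suffixed_indices('main/source/Sources')
#     # => set(['main/source/Sources',
#     #         'main/source/Sources.gz',
#     #         'main/source/Sources.bz2'])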
def _getDiskPool(pubconf, log):
"""Return a DiskPool instance for a given PubConf.
It ensures the given archive location matches the minimal structure
required.
"""
log.debug("Making directories as needed.")
pubconf.setupArchiveDirs()
log.debug("Preparing on-disk pool representation.")
dp = DiskPool(pubconf.poolroot, pubconf.temproot,
logging.getLogger("DiskPool"))
# Set the diskpool's log level to INFO to suppress debug output
dp.logger.setLevel(logging.INFO)
return dp
def _setupHtaccess(archive, pubconf, log):
"""Setup .htaccess/.htpasswd files for an archive.
"""
if not archive.private:
# FIXME: JRV 20101108 leftover .htaccess and .htpasswd files
# should be removed when support for making existing 3PA's public
# is added; bug=376072
return
htaccess_path = os.path.join(pubconf.htaccessroot, ".htaccess")
htpasswd_path = os.path.join(pubconf.htaccessroot, ".htpasswd")
    # After the initial htaccess/htpasswd files are created,
    # generate_ppa_htaccess is responsible for updating the tokens.
if not os.path.exists(htaccess_path):
log.debug("Writing htaccess file.")
write_htaccess(htaccess_path, pubconf.htaccessroot)
passwords = htpasswd_credentials_for_archive(archive)
write_htpasswd(htpasswd_path, passwords)
def getPublisher(archive, allowed_suites, log, distsroot=None):
"""Return an initialized Publisher instance for the given context.
    Call sites can override the location where the archive indexes will
    be stored via the 'distsroot' argument.
"""
if archive.purpose != ArchivePurpose.PPA:
log.debug("Finding configuration for %s %s."
% (archive.distribution.name, archive.displayname))
else:
log.debug("Finding configuration for '%s' PPA."
% archive.owner.name)
pubconf = getPubConfig(archive)
disk_pool = _getDiskPool(pubconf, log)
_setupHtaccess(archive, pubconf, log)
if distsroot is not None:
log.debug("Overriding dists root with %s." % distsroot)
pubconf.distsroot = distsroot
log.debug("Preparing publisher.")
return Publisher(log, pubconf, disk_pool, archive, allowed_suites)
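# A rough sketch of how a publishing script drives the pipeline (the
# `archive` and `log` objects are assumed to be supplied by the calling
# script; the careful/force flags are shown for a normal, non-careful run):
#
#     publisher = getPublisher(archive, allowed_suites=None, log=log)
#     publisher.A_publish(force_publishing=False)
#     publisher.A2_markPocketsWithDeletionsDirty()
#     publisher.B_dominate(force_domination=False)
#     publisher.C_doFTPArchive(is_careful=False)  # or C_writeIndexes(False)
#     publisher.D_writeReleaseFiles(is_careful=False)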
class I18nIndex(_multivalued):
"""Represents an i18n/Index file."""
_multivalued_fields = {
"sha1": ["sha1", "size", "name"],
}
@property
def _fixed_field_lengths(self):
fixed_field_lengths = {}
for key in self._multivalued_fields:
length = self._get_size_field_length(key)
fixed_field_lengths[key] = {"size": length}
return fixed_field_lengths
def _get_size_field_length(self, key):
lengths = [len(str(item['size'])) for item in self[key]]
return max(lengths)
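# An i18n/Index file serialized by this class looks roughly like the
# following, with the size column right-aligned to the width computed by
# _get_size_field_length (checksum and size are placeholders):
#
#     SHA1:
#      <sha1-hex>  <size> Translation-en.bz2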
class Publisher(object):
"""Publisher is the class used to provide the facility to publish
files in the pool of a Distribution. The publisher objects will be
instantiated by the archive build scripts and will be used throughout
the processing of each DistroSeries and DistroArchSeries in question
"""
def __init__(self, log, config, diskpool, archive, allowed_suites=None,
library=None):
"""Initialize a publisher.
Publishers need the pool root dir and a DiskPool object.
        Optionally we can pass a list of tuples, (distroseries.name, pocket),
        which will restrict the publisher's actions; only suites listed in
        allowed_suites will be modified.
"""
self.log = log
self._config = config
self.distro = archive.distribution
self.archive = archive
self.allowed_suites = allowed_suites
if not os.path.isdir(config.poolroot):
raise ValueError("Root %s is not a directory or does "
"not exist" % config.poolroot)
self._diskpool = diskpool
if library is None:
self._library = LibrarianClient()
else:
self._library = library
# Track which distroseries pockets have been dirtied by a
# change, and therefore need domination/apt-ftparchive work.
# This is a set of tuples in the form (distroseries.name, pocket)
self.dirty_pockets = set()
# Track which pockets need release files. This will contain more
# than dirty_pockets in the case of a careful index run.
# This is a set of tuples in the form (distroseries.name, pocket)
self.release_files_needed = set()
def isDirty(self, distroseries, pocket):
"""True if a publication has happened in this release and pocket."""
return (distroseries.name, pocket) in self.dirty_pockets
def markPocketDirty(self, distroseries, pocket):
"""Mark a pocket dirty only if it's allowed."""
if self.isAllowed(distroseries, pocket):
self.dirty_pockets.add((distroseries.name, pocket))
def isAllowed(self, distroseries, pocket):
"""Whether or not the given suite should be considered.
        Return True if self.allowed_suites is empty (i.e. it was not
        specified on the command line) or if the given suite is included
        in it.
Otherwise, return False.
"""
return (not self.allowed_suites or
(distroseries.name, pocket) in self.allowed_suites)
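    # A worked example (`hoary` stands for a DistroSeries named 'hoary';
    # both it and `publisher` below are hypothetical):
    #
    #     publisher.allowed_suites = [
    #         ('hoary', PackagePublishingPocket.UPDATES)]
    #     publisher.isAllowed(hoary, PackagePublishingPocket.UPDATES)  # True
    #     publisher.isAllowed(hoary, PackagePublishingPocket.RELEASE)  # False
    #     publisher.allowed_suites = []
    #     publisher.isAllowed(hoary, PackagePublishingPocket.RELEASE)  # True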
def A_publish(self, force_publishing):
"""First step in publishing: actual package publishing.
Asks each DistroSeries to publish itself, which causes
publishing records to be updated, and files to be placed on disk
where necessary.
If self.allowed_suites is set, restrict the publication procedure
to them.
"""
self.log.debug("* Step A: Publishing packages")
if self.archive.purpose in (
ArchivePurpose.PRIMARY,
ArchivePurpose.PARTNER,
):
# For PRIMARY and PARTNER archives, skip OBSOLETE and FUTURE
# series. We will never want to publish anything in them, so it
# isn't worth thinking about whether they have pending
# publications.
consider_series = [
series
for series in self.distro.series
if series.status not in (
SeriesStatus.OBSOLETE,
SeriesStatus.FUTURE,
)]
else:
# Other archives may have reasons to continue building at least
# for OBSOLETE series. For example, a PPA may be continuing to
# provide custom builds for users who haven't upgraded yet.
consider_series = self.distro.series
for distroseries in consider_series:
for pocket in self.archive.getPockets():
                if self.isAllowed(distroseries, pocket):
more_dirt = distroseries.publish(
self._diskpool, self.log, self.archive, pocket,
is_careful=force_publishing)
self.dirty_pockets.update(more_dirt)
else:
self.log.debug(
"* Skipping %s/%s", distroseries.name, pocket.name)
def A2_markPocketsWithDeletionsDirty(self):
"""An intermediate step in publishing to detect deleted packages.
        Mark pockets containing deleted packages (status DELETED,
        scheduleddeletiondate NULL and dateremoved NULL) as dirty, to
        ensure that they are processed in death row.
"""
from lp.soyuz.model.publishing import (
SourcePackagePublishingHistory, BinaryPackagePublishingHistory)
self.log.debug("* Step A2: Mark pockets with deletions as dirty")
# Query part that is common to both queries below.
base_query = """
archive = %s AND
status = %s AND
scheduleddeletiondate IS NULL AND
dateremoved is NULL
""" % sqlvalues(self.archive,
PackagePublishingStatus.DELETED)
# We need to get a set of (distroseries, pocket) tuples that have
# publications that are waiting to be deleted. Each tuple is
# added to the dirty_pockets set.
# Loop for each pocket in each distroseries:
for distroseries in self.distro.series:
for pocket in self.archive.getPockets():
if self.cannotModifySuite(distroseries, pocket):
                    # We don't want to mark release pockets dirty in a
                    # stable distroseries, no matter what earlier bugs
                    # may have dirtied them.
continue
clauses = [base_query]
clauses.append("pocket = %s" % sqlvalues(pocket))
clauses.append("distroseries = %s" % sqlvalues(distroseries))
# Make the source publications query.
source_query = " AND ".join(clauses)
sources = SourcePackagePublishingHistory.select(source_query)
if sources.count() > 0:
self.markPocketDirty(distroseries, pocket)
# No need to check binaries if the pocket is already
# dirtied from a source.
continue
# Make the binary publications query.
clauses = [base_query]
clauses.append("pocket = %s" % sqlvalues(pocket))
clauses.append("DistroArchSeries = DistroArchSeries.id")
clauses.append("DistroArchSeries.distroseries = %s" %
sqlvalues(distroseries))
binary_query = " AND ".join(clauses)
binaries = BinaryPackagePublishingHistory.select(binary_query,
clauseTables=['DistroArchSeries'])
if binaries.count() > 0:
self.markPocketDirty(distroseries, pocket)
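    # For a given series and pocket, the assembled source query is roughly
    # (parameter values are illustrative, filled in by sqlvalues):
    #
    #     archive = <archive id> AND
    #     status = <DELETED> AND
    #     scheduleddeletiondate IS NULL AND
    #     dateremoved is NULL AND
    #     pocket = <pocket> AND
    #     distroseries = <series id>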
def B_dominate(self, force_domination):
"""Second step in publishing: domination."""
self.log.debug("* Step B: dominating packages")
judgejudy = Dominator(self.log, self.archive)
for distroseries in self.distro.series:
for pocket in self.archive.getPockets():
if not force_domination:
if not self.isDirty(distroseries, pocket):
self.log.debug("Skipping domination for %s/%s" %
(distroseries.name, pocket.name))
continue
self.checkDirtySuiteBeforePublishing(distroseries, pocket)
judgejudy.judgeAndDominate(distroseries, pocket)
def C_doFTPArchive(self, is_careful):
"""Does the ftp-archive step: generates Sources and Packages."""
self.log.debug("* Step C: Set apt-ftparchive up and run it")
apt_handler = FTPArchiveHandler(self.log, self._config,
self._diskpool, self.distro,
self)
apt_handler.run(is_careful)
def C_writeIndexes(self, is_careful):
"""Write Index files (Packages & Sources) using LP information.
        Iterates over all distroseries and their pockets and components.
"""
self.log.debug("* Step C': write indexes directly from DB")
for distroseries in self.distro:
for pocket in self.archive.getPockets():
if not is_careful:
if not self.isDirty(distroseries, pocket):
self.log.debug("Skipping index generation for %s/%s" %
(distroseries.name, pocket.name))
continue
self.checkDirtySuiteBeforePublishing(distroseries, pocket)
self.release_files_needed.add((distroseries.name, pocket))
components = self.archive.getComponentsForSeries(distroseries)
for component in components:
self._writeComponentIndexes(
distroseries, pocket, component)
def D_writeReleaseFiles(self, is_careful):
"""Write out the Release files for the provided distribution.
If is_careful is specified, we include all pockets of all releases.
        Otherwise we include only pockets recorded in dirty_pockets.
"""
self.log.debug("* Step D: Generating Release files.")
for distroseries in self.distro:
for pocket in self.archive.getPockets():
if not is_careful:
if not self.isDirty(distroseries, pocket):
self.log.debug("Skipping release files for %s/%s" %
(distroseries.name, pocket.name))
continue
self.checkDirtySuiteBeforePublishing(distroseries, pocket)
self._writeSuite(distroseries, pocket)
def _writeComponentIndexes(self, distroseries, pocket, component):
"""Write Index files for single distroseries + pocket + component.
Iterates over all supported architectures and 'sources', no
support for installer-* yet.
Write contents using LP info to an extra plain file (Packages.lp
and Sources.lp .
"""
suite_name = distroseries.getSuite(pocket)
self.log.debug("Generate Indexes for %s/%s"
% (suite_name, component.name))
self.log.debug("Generating Sources")
source_index_root = os.path.join(
self._config.distsroot, suite_name, component.name, 'source')
source_index = RepositoryIndexFile(
source_index_root, self._config.temproot, 'Sources')
for spp in distroseries.getSourcePackagePublishing(
PackagePublishingStatus.PUBLISHED, pocket=pocket,
component=component, archive=self.archive):
            stanza = spp.getIndexStanza().encode('utf-8') + '\n\n'
source_index.write(stanza)
source_index.close()
for arch in distroseries.architectures:
if not arch.enabled:
continue
arch_path = 'binary-%s' % arch.architecturetag
self.log.debug("Generating Packages for %s" % arch_path)
package_index_root = os.path.join(
self._config.distsroot, suite_name, component.name, arch_path)
package_index = RepositoryIndexFile(
package_index_root, self._config.temproot, 'Packages')
di_index_root = os.path.join(
self._config.distsroot, suite_name, component.name,
'debian-installer', arch_path)
di_index = RepositoryIndexFile(
di_index_root, self._config.temproot, 'Packages')
for bpp in distroseries.getBinaryPackagePublishing(
archtag=arch.architecturetag, pocket=pocket,
component=component, archive=self.archive):
stanza = bpp.getIndexStanza().encode('utf-8') + '\n\n'
if (bpp.binarypackagerelease.binpackageformat in
(BinaryPackageFormat.DEB, BinaryPackageFormat.DDEB)):
package_index.write(stanza)
elif (bpp.binarypackagerelease.binpackageformat ==
BinaryPackageFormat.UDEB):
di_index.write(stanza)
else:
self.log.debug(
"Cannot publish %s because it is not a DEB or "
"UDEB file" % bpp.displayname)
package_index.close()
di_index.close()
def cannotModifySuite(self, distroseries, pocket):
"""Return True if the distroseries is stable and pocket is release."""
return (not distroseries.isUnstable() and
not self.archive.allowUpdatesToReleasePocket() and
pocket == PackagePublishingPocket.RELEASE)
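    # For example, once 'hoary' leaves development, the 'hoary' suite (its
    # RELEASE pocket) becomes immutable in archives that disallow release
    # pocket updates, while 'hoary-updates' and 'hoary-security' can still
    # be modified.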
def checkDirtySuiteBeforePublishing(self, distroseries, pocket):
"""Last check before publishing a dirty suite.
        If the distroseries is stable and the archive doesn't allow updates
        in the RELEASE pocket (as with primary archives), we certainly have
        a problem; better to stop.
"""
if self.cannotModifySuite(distroseries, pocket):
raise AssertionError(
"Oops, tainting RELEASE pocket of %s." % distroseries)
def _getLabel(self):
"""Return the contents of the Release file Label field.
        :return: the text to use as the value of the Release file
            'Label' field.
"""
if self.archive.is_ppa:
return self.archive.displayname
elif self.archive.purpose == ArchivePurpose.PARTNER:
return "Partner archive"
else:
return self.distro.displayname
def _getOrigin(self):
"""Return the contents of the Release file Origin field.
Primary, Partner and Copy archives use the distribution displayname.
For PPAs we use a more specific value that follows
`get_ppa_reference`.
        :return: the text to use as the value of the Release file
            'Origin' field.
"""
# XXX al-maisan, 2008-11-19, bug=299981. If this file is released
# from a copy archive then modify the origin to indicate so.
if self.archive.purpose == ArchivePurpose.PARTNER:
return "Canonical"
if not self.archive.is_ppa:
return self.distro.displayname
return "LP-PPA-%s" % get_ppa_reference(self.archive)
def _writeReleaseFile(self, suite, release_data):
"""Write a Release file to the archive.
:param suite: The name of the suite whose Release file is to be
written.
:param release_data: A `debian.deb822.Release` object to write
to the filesystem.
"""
location = os.path.join(self._config.distsroot, suite)
if not file_exists(location):
os.makedirs(location)
with open(os.path.join(location, "Release"), "w") as release_file:
release_data.dump(release_file, "utf-8")
def _writeSuite(self, distroseries, pocket):
"""Write out the Release files for the provided suite."""
# XXX: kiko 2006-08-24: Untested method.
# As we generate file lists for apt-ftparchive we record which
        # distroseries and so on we need to generate Release files for.
# We store this in release_files_needed and consume the information
# when writeReleaseFiles is called.
if (distroseries.name, pocket) not in self.release_files_needed:
            # If we don't need to generate a Release file for this
            # series and pocket, don't!
return
all_components = [
comp.name for comp in
self.archive.getComponentsForSeries(distroseries)]
all_architectures = [
a.architecturetag for a in distroseries.enabled_architectures]
all_files = set()
for component in all_components:
self._writeSuiteSource(
distroseries, pocket, component, all_files)
for architecture in all_architectures:
self._writeSuiteArch(
distroseries, pocket, component, architecture, all_files)
self._writeSuiteI18n(
distroseries, pocket, component, all_files)
drsummary = "%s %s " % (self.distro.displayname,
distroseries.displayname)
if pocket == PackagePublishingPocket.RELEASE:
drsummary += distroseries.version
else:
drsummary += pocket.name.capitalize()
suite = distroseries.getSuite(pocket)
release_file = Release()
release_file["Origin"] = self._getOrigin()
release_file["Label"] = self._getLabel()
release_file["Suite"] = suite
release_file["Version"] = distroseries.version
release_file["Codename"] = distroseries.name
release_file["Date"] = datetime.utcnow().strftime(
"%a, %d %b %Y %k:%M:%S UTC")
release_file["Architectures"] = " ".join(
sorted(list(all_architectures)))
release_file["Components"] = " ".join(
reorder_components(all_components))
release_file["Description"] = drsummary
if (pocket == PackagePublishingPocket.BACKPORTS and
distroseries.backports_not_automatic):
release_file["NotAutomatic"] = "yes"
release_file["ButAutomaticUpgrades"] = "yes"
for filename in sorted(list(all_files), key=os.path.dirname):
entry = self._readIndexFileContents(suite, filename)
if entry is None:
continue
release_file.setdefault("MD5Sum", []).append({
"md5sum": hashlib.md5(entry).hexdigest(),
"name": filename,
"size": len(entry)})
release_file.setdefault("SHA1", []).append({
"sha1": hashlib.sha1(entry).hexdigest(),
"name": filename,
"size": len(entry)})
release_file.setdefault("SHA256", []).append({
"sha256": hashlib.sha256(entry).hexdigest(),
"name": filename,
"size": len(entry)})
self._writeReleaseFile(suite, release_file)
# Skip signature if the archive signing key is undefined.
if self.archive.signing_key is None:
self.log.debug("No signing key available, skipping signature.")
return
# Sign the repository.
archive_signer = IArchiveSigningKey(self.archive)
archive_signer.signRepository(suite)
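    # The resulting dists/<suite>/Release file looks roughly like this
    # (all values illustrative; checksum lists abbreviated):
    #
    #     Origin: Ubuntu
    #     Label: Ubuntu
    #     Suite: hoary-updates
    #     Version: 5.04
    #     Codename: hoary
    #     Date: Sat, 01 Jan 2011 12:00:00 UTC
    #     Architectures: amd64 i386
    #     Components: main restricted universe multiverse
    #     Description: Ubuntu Hoary Updates
    #     MD5Sum:
    #      <md5-hex>  <size> main/binary-i386/Packages
    #      ...
    #     SHA1:
    #      ...
    #     SHA256:
    #      ...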
def _writeSuiteArchOrSource(self, distroseries, pocket, component,
file_stub, arch_name, arch_path,
all_series_files):
"""Write out a Release file for an architecture or source."""
# XXX kiko 2006-08-24: Untested method.
suite = distroseries.getSuite(pocket)
self.log.debug("Writing Release file for %s/%s/%s" % (
suite, component, arch_path))
# Now, grab the actual (non-di) files inside each of
# the suite's architectures
file_stub = os.path.join(component, arch_path, file_stub)
all_series_files.update(get_suffixed_indices(file_stub))
all_series_files.add(os.path.join(component, arch_path, "Release"))
release_file = Release()
release_file["Archive"] = suite
release_file["Version"] = distroseries.version
release_file["Component"] = component
release_file["Origin"] = self._getOrigin()
release_file["Label"] = self._getLabel()
release_file["Architecture"] = arch_name
        release_path = os.path.join(
            self._config.distsroot, suite, component, arch_path, "Release")
        with open(release_path, "w") as f:
            release_file.dump(f, "utf-8")
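    # The per-architecture (or per-source) Release file is a small stanza,
    # e.g. dists/hoary/main/binary-i386/Release (values illustrative):
    #
    #     Archive: hoary
    #     Version: 5.04
    #     Component: main
    #     Origin: Ubuntu
    #     Label: Ubuntu
    #     Architecture: i386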
def _writeSuiteSource(self, distroseries, pocket, component,
all_series_files):
"""Write out a Release file for a suite's sources."""
self._writeSuiteArchOrSource(
distroseries, pocket, component, 'Sources', 'source', 'source',
all_series_files)
def _writeSuiteArch(self, distroseries, pocket, component,
arch_name, all_series_files):
"""Write out a Release file for an architecture in a suite."""
file_stub = 'Packages'
arch_path = 'binary-' + arch_name
# Only the primary and PPA archives have debian-installer.
if self.archive.purpose != ArchivePurpose.PARTNER:
# Set up the debian-installer paths for main_archive.
# d-i paths are nested inside the component.
di_path = os.path.join(
component, "debian-installer", arch_path)
di_file_stub = os.path.join(di_path, file_stub)
all_series_files.update(get_suffixed_indices(di_file_stub))
self._writeSuiteArchOrSource(
distroseries, pocket, component, 'Packages', arch_name, arch_path,
all_series_files)
def _writeSuiteI18n(self, distroseries, pocket, component,
all_series_files):
"""Write out an Index file for translation files in a suite."""
suite = distroseries.getSuite(pocket)
self.log.debug("Writing Index file for %s/%s/i18n" % (
suite, component))
i18n_dir = os.path.join(self._config.distsroot, suite, component,
"i18n")
i18n_files = []
try:
for i18n_file in os.listdir(i18n_dir):
if not i18n_file.startswith('Translation-'):
continue
if not i18n_file.endswith('.bz2'):
# Save bandwidth: mirrors should only need the .bz2
# versions.
continue
i18n_files.append(i18n_file)
except OSError as e:
if e.errno != errno.ENOENT:
raise
if not i18n_files:
# If the i18n directory doesn't exist or is empty, we don't need
# to index it.
return
i18n_index = I18nIndex()
for i18n_file in sorted(i18n_files):
entry = self._readIndexFileContents(
suite, os.path.join(component, "i18n", i18n_file))
if entry is None:
continue
i18n_index.setdefault("SHA1", []).append({
"sha1": hashlib.sha1(entry).hexdigest(),
"name": i18n_file,
"size": len(entry)})
with open(os.path.join(i18n_dir, "Index"), "w") as f:
i18n_index.dump(f, "utf-8")
# Schedule this for inclusion in the Release file.
all_series_files.add(os.path.join(component, "i18n", "Index"))
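    # On disk this produces, for example:
    #
    #     dists/hoary/main/i18n/Translation-en.bz2   (already present)
    #     dists/hoary/main/i18n/Index                (written here)
    #
    # and 'main/i18n/Index' is scheduled for the suite Release file's
    # checksum lists via all_series_files.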
def _readIndexFileContents(self, distroseries_name, file_name):
"""Read an index files' contents.
:param distroseries_name: Distro series name
:param file_name: Filename relative to the parent container directory.
:return: File contents, or None if the file could not be found.
"""
full_name = os.path.join(self._config.distsroot,
distroseries_name, file_name)
if not os.path.exists(full_name):
            # The file we were asked to read doesn't exist. Most likely
            # we have an incomplete archive (e.g. no sources for a given
            # distroseries). This is a non-fatal issue.
self.log.debug("Failed to find " + full_name)
return None
        with open(full_name, 'r') as in_file:
            return in_file.read()
def deleteArchive(self):
"""Delete the archive.
Physically remove the entire archive from disk and set the archive's
status to DELETED.
        Any errors encountered while removing the archive from disk are
        caught and logged as warnings.
"""
root_dir = os.path.join(
self._config.distroroot, self.archive.owner.name,
self.archive.name)
self.log.info(
"Attempting to delete archive '%s/%s' at '%s'." % (
self.archive.owner.name, self.archive.name, root_dir))
for directory in (root_dir, self._config.metaroot):
if not os.path.exists(directory):
continue
try:
shutil.rmtree(directory)
            except (shutil.Error, OSError) as e:
self.log.warning(
"Failed to delete directory '%s' for archive "
"'%s/%s'\n%s" % (
directory, self.archive.owner.name,
self.archive.name, e))
self.archive.status = ArchiveStatus.DELETED
self.archive.publish = False