def findBuildsByArchitecture(self, distroseries, archive):
    """Find associated builds, by architecture.

    Looks for `BinaryPackageBuild` records for this source package
    release, with publication records in the given `distroseries` and
    `archive`. There should be at most one of these per architecture.

    :param distroseries: The `DistroSeries` to look in.
    :param archive: The `Archive` to look in.
    :return: A dict mapping architecture tags (in string form,
        e.g. 'i386') to the `BinaryPackageBuild` for that architecture.
    """
    # Avoid circular imports.
    from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
    from lp.soyuz.model.distroarchseries import DistroArchSeries

    BuildDAS = ClassAlias(DistroArchSeries, 'BuildDAS')
    PublishDAS = ClassAlias(DistroArchSeries, 'PublishDAS')

    query = Store.of(self).find(
        (BuildDAS.architecturetag, BinaryPackageBuild),
        BinaryPackageBuild.source_package_release == self,
        BinaryPackageRelease.buildID == BinaryPackageBuild.id,
        BuildDAS.id == BinaryPackageBuild.distro_arch_series_id,
        BinaryPackagePublishingHistory.binarypackagereleaseID ==
            BinaryPackageRelease.id,
        BinaryPackagePublishingHistory.archiveID == archive.id,
        PublishDAS.id ==
            BinaryPackagePublishingHistory.distroarchseriesID,
        PublishDAS.distroseriesID == distroseries.id,
        # Architecture-independent binary package releases are built
        # in the nominated arch-indep architecture but published in
        # all architectures. This condition makes sure we consider
        # only builds that have been published in their own
        # architecture.
        PublishDAS.architecturetag == BuildDAS.architecturetag)
    results = list(query.config(distinct=True))
    mapped_results = dict(results)
    assert len(mapped_results) == len(results), (
        "Found multiple build candidates per architecture: %s. "
        "This may mean that we have a serious problem in our DB model. "
        "Further investigation is required."
        % [(tag, build.id) for tag, build in results])
    return mapped_results
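# Illustrative usage sketch for the mapping returned above; `spr` (a
# SourcePackageRelease), `hoary` (a DistroSeries) and `archive` are
# hypothetical stand-ins, not names from this module:
#
#     builds = spr.findBuildsByArchitecture(hoary, archive)
#     i386_build = builds.get('i386')
#     if i386_build is None:
#         pass  # no i386 binary of this release is published in `archive`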
def getBuildByArch(self, distroarchseries, archive):
    """See ISourcePackageRelease."""
    # First we try to follow any binaries built from the given source
    # in a distroarchseries with the given architecturetag and published
    # in the given (distroarchseries, archive) location.
    # (Querying all architectures and then picking the right one out
    # of the result turns out to be much faster than querying for
    # just the architecture we want).
    builds_by_arch = self.findBuildsByArchitecture(
        distroarchseries.distroseries, archive)
    build = builds_by_arch.get(distroarchseries.architecturetag)
    if build is not None:
        # If there was any published binary we can use its original build.
        # This case covers the situations when both source and binaries
        # got copied from another location.
        return build

    # If there was no published binary we have to try to find a
    # suitable build in all possible locations across the distroseries
    # inheritance tree. See below.
        "BinaryPackageBuild.package_build = PackageBuild.id AND "
        "PackageBuild.build_farm_job = BuildFarmJob.id AND "
    return PackageDiff(
        from_source=self, to_source=to_sourcepackagerelease,
        requester=requester, status=status)
def aggregate_changelog(self, since_version):
    """See `ISourcePackagePublishingHistory`."""
    if self.changelog is None:
        return None

    apt_pkg.init_system()
    chunks = []
    changelog = self.changelog
    # The python-debian API for parsing changelogs is pretty awful. The
    # only useful way of extracting info is to use the iterator on
    # Changelog and then compare versions.
    try:
        for block in Changelog(changelog.read()):
            version = block._raw_version
            if (since_version and
                apt_pkg.version_compare(version, since_version) <= 0):
                break
            # Poking in private attributes is not nice but again the
            # API is terrible. We want to ensure that the name/date
            # line is omitted from these composite changelogs.
            block._no_trailer = True
            try:
                # python-debian adds an extra blank line to the chunks
                # so we'll have to sort this out.
                chunks.append(str(block).rstrip())
            except ChangelogCreateError:
                continue
            if not since_version:
                # If a particular version was not requested we just
                # return the most recent changelog entry.
                break
    except ChangelogParseError:
        return None

    output = "\n\n".join(chunks)
    return output.decode("utf-8", "replace")
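# Rough sketch of the python-debian and apt_pkg behaviour the method above
# relies on (illustrative only; the file path and versions are made up):
#
#     import apt_pkg
#     from debian.changelog import Changelog
#
#     apt_pkg.init_system()
#     # version_compare() returns >0, 0 or <0, cmp()-style, using Debian
#     # version ordering.
#     assert apt_pkg.version_compare('1.0-2', '1.0-1') > 0
#
#     # Iterating a Changelog yields one block per entry, newest first.
#     for block in Changelog(open('debian/changelog').read()):
#         print(block._raw_version)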
def getActiveArchSpecificPublications(self, archive, distroseries,
                                      pocket):
    """Find architecture-specific binary publications for this release.

    For example, say a source package release contains binary packages of:
     * "foo" for i386 (pending in i386)
     * "foo" for amd64 (published in amd64)
     * "foo-common" for the "all" architecture (pending or published in
       various real processor architectures)

    In that case, this search will return foo(i386) and foo(amd64). The
    dominator uses this when figuring out whether foo-common can be
    superseded: we don't track dependency graphs, but we know that the
    architecture-specific "foo" releases are likely to depend on the
    architecture-independent foo-common release.

    :param archive: The `Archive` to search.
    :param distroseries: The `DistroSeries` to search.
    :param pocket: The `PackagePublishingPocket` to search.
    :return: A Storm result set of active, architecture-specific
        `BinaryPackagePublishingHistory` objects for this source package
        release and the given `archive`, `distroseries`, and `pocket`.
    """
    # Avoid circular imports.
    from lp.soyuz.interfaces.publishing import active_publishing_status
    from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
    from lp.soyuz.model.distroarchseries import DistroArchSeries

    return Store.of(self).find(
        BinaryPackagePublishingHistory,
        BinaryPackageBuild.source_package_release_id == self.id,
        BinaryPackageRelease.build == BinaryPackageBuild.id,
        BinaryPackagePublishingHistory.binarypackagereleaseID ==
            BinaryPackageRelease.id,
        BinaryPackagePublishingHistory.archiveID == archive.id,
        BinaryPackagePublishingHistory.distroarchseriesID ==
            DistroArchSeries.id,
        DistroArchSeries.distroseriesID == distroseries.id,
        BinaryPackagePublishingHistory.pocket == pocket,
        BinaryPackagePublishingHistory.status.is_in(
            active_publishing_status),
        BinaryPackageRelease.architecturespecific == True)
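# Illustrative sketch of how a dominator-style check might use the query
# above; `spr`, `archive`, `series` and `pocket` are hypothetical objects
# and this is not the actual dominator code:
#
#     active = spr.getActiveArchSpecificPublications(archive, series, pocket)
#     if active.is_empty():
#         pass  # no arch-specific siblings remain, so the arch-indep
#               # publication (e.g. foo-common) is safe to supersede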