48
46
FEATURE_FLAG_ENABLE_MODULE = u"soyuz.derived_series_jobs.enabled"
51
def make_metadata(sourcepackagename_id, parent_series_id):
    """Return JSON metadata for a job on `sourcepackagename_id`.

    Takes plain IDs (not model objects) so it can be used both when
    creating a single job and when mass-composing insertion tuples.

    :param sourcepackagename_id: ID of the `SourcePackageName` the job
        is about.
    :param parent_series_id: ID of the parent `DistroSeries`.
    :return: A dict suitable for serialization as the job's JSON
        metadata.
    """
    return {
        'sourcepackagename': sourcepackagename_id,
        'parent_series': parent_series_id,
        }
70
68
job = DistributionJob(
71
69
distribution=derived_series.distribution, distroseries=derived_series,
72
70
job_type=DistributionJobType.DISTROSERIESDIFFERENCE,
73
metadata=make_metadata(sourcepackagename, parent_series))
71
metadata=make_metadata(sourcepackagename.id, parent_series.id))
74
72
IMasterStore(DistributionJob).add(job)
75
73
return DistroSeriesDifferenceJob(job)
76
def compose_job_insertion_tuple(derived_series, parent_series,
                                sourcepackagename_id, job_id):
    """Compose tuple for insertion into `DistributionJob`.

    :param derived_series: Derived `DistroSeries`.
    :param parent_series: Parent `DistroSeries`.
    :param sourcepackagename_id: ID of `SourcePackageName`.
    :param job_id: associated `Job` id.
    :return: A tuple of: derived distribution id, derived distroseries id,
        job type, job id, JSON data map.
    """
    # Serialize the metadata once here; callers quote the whole tuple
    # for bulk SQL insertion.
    json = DistributionJob.serializeMetadata(make_metadata(
        sourcepackagename_id, parent_series.id))
    return (
        derived_series.distribution.id,
        derived_series.id,
        DistributionJobType.DISTROSERIESDIFFERENCE,
        job_id,
        json,
        )
78
98
def create_multiple_jobs(derived_series, parent_series):
79
"""Create a `DistroSeriesDifferenceJob` for all the source packages in
99
"""Create `DistroSeriesDifferenceJob`s between parent and derived series.
82
101
:param derived_series: A `DistroSeries` that is assumed to be derived
84
103
:param parent_series: A `DistroSeries` that is a parent of
105
:return: A list of newly-created `DistributionJob` ids.
87
107
store = IStore(SourcePackageRelease)
88
108
source_package_releases = store.find(
89
109
SourcePackageRelease,
91
SourcePackagePublishingHistory.sourcepackagerelease ==
92
SourcePackageRelease.id,
93
SourcePackagePublishingHistory.distroseries == derived_series.id,
94
SourcePackagePublishingHistory.status.is_in(
95
active_publishing_status)))
110
SourcePackagePublishingHistory.sourcepackagerelease ==
111
SourcePackageRelease.id,
112
SourcePackagePublishingHistory.distroseries == derived_series.id,
113
SourcePackagePublishingHistory.status.is_in(active_publishing_status))
96
114
nb_jobs = source_package_releases.count()
97
115
sourcepackagenames = source_package_releases.values(
98
116
SourcePackageRelease.sourcepackagenameID)
99
117
job_ids = Job.createMultiple(store, nb_jobs)
101
def composeJobInsertionTuple(derived_series, parent_series,
102
sourcepackagename, job_id):
104
derived_series.distribution.id, derived_series.id,
105
DistributionJobType.DISTROSERIESDIFFERENCE, job_id,
106
DistributionJob.serializeMetadata(
107
{'sourcepackagename': sourcepackagename,
108
'parent_series': parent_series.id}))
109
format_string = "(%s)" % ", ".join(["%s"] * len(data))
110
return format_string % sqlvalues(*data)
113
composeJobInsertionTuple(
114
derived_series, parent_series, sourcepackagename, job_id)
115
for job_id, sourcepackagename in
116
zip(job_ids, sourcepackagenames)]
120
quote(compose_job_insertion_tuple(
121
derived_series, parent_series, sourcepackagename, job_id))
122
for job_id, sourcepackagename in zip(job_ids, sourcepackagenames)]
118
124
store = IStore(DistributionJob)
119
125
result = store.execute("""
161
167
runner some unnecessary work, but we don't expect a bit of
162
168
unnecessary work to be a big problem.
164
if derived_series is None:
166
dsp = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
171
if parent.parent_series.distribution == derived_series.distribution:
172
# Differences within a distribution are not tracked.
170
if parent_series.distribution == derived_series.distribution:
171
# Differences within a distribution are not tracked.
174
173
existing_jobs = find_waiting_jobs(
175
174
derived_series, sourcepackagename, parent_series)
176
175
return len(existing_jobs) == 0
204
203
PackagePublishingPocket.BACKPORTS,
205
204
PackagePublishingPocket.PROPOSED):
208
parent_series = derived_series.getParentSeries()
209
# Create jobs for DSDs between the derived_series and its
211
for parent in parent_series:
213
derived_series, sourcepackagename, parent):
214
jobs.append(create_job(
215
derived_series, sourcepackagename, parent))
207
# Create jobs for DSDs between the derived_series' parents and
208
# the derived_series itself.
209
parent_series_jobs = [
210
create_job(derived_series, sourcepackagename, parent)
211
for parent in derived_series.getParentSeries()
212
if may_require_job(derived_series, sourcepackagename, parent)]
216
214
# Create jobs for DSDs between the derived_series and its
218
for child in derived_series.getDerivedSeries():
220
child, sourcepackagename, derived_series):
221
jobs.append(create_job(
222
child, sourcepackagename, derived_series))
216
derived_series_jobs = [
217
create_job(child, sourcepackagename, derived_series)
218
for child in derived_series.getDerivedSeries()
219
if may_require_job(child, sourcepackagename, derived_series)]
221
return parent_series_jobs + derived_series_jobs
226
224
def massCreateForSeries(cls, derived_series):