~launchpad-pqm/launchpad/devel

Viewing changes to lib/lp/soyuz/model/distroseriesdifferencejob.py

  • Committer: Julian Edwards
  • Date: 2011-07-28 20:46:18 UTC
  • mfrom: (13553 devel)
  • mto: This revision was merged to the branch mainline in revision 13555.
  • Revision ID: julian.edwards@canonical.com-20110728204618-tivj2wx2oa9s32bx
merge trunk

@@ -8,21 +8,22 @@
     'DistroSeriesDifferenceJob',
     ]
 
-import simplejson
+from storm.expr import And
 from zope.component import getUtility
 from zope.interface import (
     classProvides,
     implements,
     )
 
-from canonical.database.sqlbase import quote
-from lp.services.database.lpstorm import (
+from canonical.database.sqlbase import sqlvalues
+from canonical.launchpad.interfaces.lpstorm import (
     IMasterStore,
     IStore,
     )
 from lp.registry.interfaces.distroseriesdifference import (
     IDistroSeriesDifferenceSource,
     )
+from lp.registry.interfaces.distroseriesparent import IDistroSeriesParentSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.distroseries import DistroSeries
 from lp.registry.model.distroseriesdifference import DistroSeriesDifference
@@ -47,11 +48,11 @@
 FEATURE_FLAG_ENABLE_MODULE = u"soyuz.derived_series_jobs.enabled"
 
 
-def make_metadata(sourcepackagename_id, parent_series_id):
-    """Return JSON metadata for a job on `sourcepackagename_id`."""
+def make_metadata(sourcepackagename, parent_series):
+    """Return JSON metadata for a job on `sourcepackagename`."""
     return {
-        'sourcepackagename': sourcepackagename_id,
-        'parent_series': parent_series_id,
+        'sourcepackagename': sourcepackagename.id,
+        'parent_series': parent_series.id,
     }
 
 
@@ -69,62 +70,50 @@
     job = DistributionJob(
         distribution=derived_series.distribution, distroseries=derived_series,
         job_type=DistributionJobType.DISTROSERIESDIFFERENCE,
-        metadata=make_metadata(sourcepackagename.id, parent_series.id))
+        metadata=make_metadata(sourcepackagename, parent_series))
     IMasterStore(DistributionJob).add(job)
     return DistroSeriesDifferenceJob(job)
 
 
-def compose_job_insertion_tuple(derived_series, parent_series,
-                                sourcepackagename_id, job_id):
-    """Compose tuple for insertion into `DistributionJob`.
-
-    :param derived_series: Derived `DistroSeries`.
-    :param parent_series: Parent `DistroSeries`.
-    :param sourcepackagename_id: ID of `SourcePackageName`.
-    :param job_id: associated `Job` id.
-    :return: A tuple of: derived distribution id, derived distroseries id,
-        job type, job id, JSON data map.
-    """
-    json = simplejson.dumps(
-        make_metadata(sourcepackagename_id, parent_series.id))
-    return (
-        derived_series.distribution.id,
-        derived_series.id,
-        DistributionJobType.DISTROSERIESDIFFERENCE,
-        job_id,
-        json,
-        )
-
-
 def create_multiple_jobs(derived_series, parent_series):
-    """Create `DistroSeriesDifferenceJob`s between parent and derived series.
+    """Create a `DistroSeriesDifferenceJob` for all the source packages in
+    archive.
 
     :param derived_series: A `DistroSeries` that is assumed to be derived
         from another one.
     :param parent_series: A `DistroSeries` that is a parent of
         `derived_series`.
-    :return: A list of newly-created `DistributionJob` ids.
     """
     store = IStore(SourcePackageRelease)
     source_package_releases = store.find(
         SourcePackageRelease,
-        SourcePackagePublishingHistory.sourcepackagerelease ==
-            SourcePackageRelease.id,
-        SourcePackagePublishingHistory.distroseries == derived_series.id,
-        SourcePackagePublishingHistory.status.is_in(active_publishing_status))
+        And(
+            SourcePackagePublishingHistory.sourcepackagerelease ==
+                SourcePackageRelease.id,
+            SourcePackagePublishingHistory.distroseries == derived_series.id,
+            SourcePackagePublishingHistory.status.is_in(
+                active_publishing_status)))
     nb_jobs = source_package_releases.count()
-
-    if nb_jobs == 0:
-        return []
-
     sourcepackagenames = source_package_releases.values(
         SourcePackageRelease.sourcepackagenameID)
     job_ids = Job.createMultiple(store, nb_jobs)
 
-    job_tuples = [
-        quote(compose_job_insertion_tuple(
-            derived_series, parent_series, sourcepackagename, job_id))
-        for job_id, sourcepackagename in zip(job_ids, sourcepackagenames)]
+    def composeJobInsertionTuple(derived_series, parent_series,
+                                 sourcepackagename, job_id):
+        data = (
+            derived_series.distribution.id, derived_series.id,
+            DistributionJobType.DISTROSERIESDIFFERENCE, job_id,
+            DistributionJob.serializeMetadata(
+                {'sourcepackagename': sourcepackagename,
+                 'parent_series': parent_series.id}))
+        format_string = "(%s)" % ", ".join(["%s"] * len(data))
+        return format_string % sqlvalues(*data)
+
+    job_contents = [
+        composeJobInsertionTuple(
+            derived_series, parent_series, sourcepackagename, job_id)
+        for job_id, sourcepackagename in
+            zip(job_ids, sourcepackagenames)]
 
     store = IStore(DistributionJob)
     result = store.execute("""
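The substantive change in this hunk is how create_multiple_jobs() composes its bulk insert: each job is rendered as one quoted "(value, value, ...)" tuple, and all tuples are joined into a single INSERT ... VALUES ... RETURNING statement (the statement itself continues in the next hunk). The following is a rough, generic sketch of that pattern only; the column list is copied from the INSERT in this diff, while the DistributionJob table name and the naive quote_value() helper are assumptions standing in for the real quote()/sqlvalues() machinery:

def quote_value(value):
    # Hypothetical stand-in for proper SQL quoting (quote()/sqlvalues() in
    # the real code).  Do not use naive quoting like this in production.
    if isinstance(value, str):
        return "'%s'" % value.replace("'", "''")
    return str(value)


def compose_row(values):
    # One "(v1, v2, ...)" tuple per job, mirroring composeJobInsertionTuple.
    return "(%s)" % ", ".join(quote_value(value) for value in values)


def compose_bulk_insert(rows):
    # All rows go into one statement, so creating N jobs costs a single
    # database round trip, and RETURNING id hands back the new job ids.
    tuples = ", ".join(compose_row(row) for row in rows)
    return (
        "INSERT INTO DistributionJob ("
        "distribution, distroseries, job_type, job, json_data) "
        "VALUES %s RETURNING id" % tuples)


print(compose_bulk_insert([
    (1, 10, 7, 100, '{"sourcepackagename": 42, "parent_series": 7}'),
    (1, 10, 7, 101, '{"sourcepackagename": 43, "parent_series": 7}'),
]))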
@@ -132,7 +121,7 @@
             distribution, distroseries, job_type, job, json_data)
         VALUES %s
         RETURNING id
-        """ % ", ".join(job_tuples))
+        """ % ", ".join(job_contents))
     return [job_id for job_id, in result]
 
 
@@ -142,7 +131,8 @@
     # the metadata string.  It's fragile, but this is only an
     # optimization.  It's not actually disastrous to create
    # redundant jobs occasionally.
-    json_metadata = make_metadata(sourcepackagename.id, parent_series.id)
+    json_metadata = DistributionJob.serializeMetadata(
+        make_metadata(sourcepackagename, parent_series))
 
     # Use master store because we don't like outdated information
     # here.
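The comment in this hunk calls matching on the serialized metadata string fragile but acceptable, since a missed match only produces a redundant job. A small illustration of the fragility, using the standard json module rather than the job machinery's own serializer:

import json

# Two logically identical metadata dicts...
a = {'sourcepackagename': 42, 'parent_series': 7}
b = {'parent_series': 7, 'sourcepackagename': 42}

# ...compare equal as data, but their serialized forms need not be identical
# (key order, whitespace), so an exact string comparison can miss a match.
assert a == b
print(json.dumps(a))
print(json.dumps(b))
print(json.dumps(a) == json.dumps(b))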
@@ -153,7 +143,7 @@
         DistributionJob.job_type ==
             DistributionJobType.DISTROSERIESDIFFERENCE,
         DistributionJob.distroseries == derived_series,
-        DistributionJob.metadata == json_metadata,
+        DistributionJob._json_data == json_metadata,
         DistributionJob.job_id.is_in(Job.ready_jobs))
 
     return [
@@ -171,9 +161,16 @@
     runner some unnecessary work, but we don't expect a bit of
     unnecessary work to be a big problem.
     """
-    if parent_series.distribution == derived_series.distribution:
-        # Differences within a distribution are not tracked.
-        return False
+    if derived_series is None:
+        return False
+    dsp = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
+        derived_series)
+    if dsp.count() == 0:
+        return False
+    for parent in dsp:
+        if parent.parent_series.distribution == derived_series.distribution:
+            # Differences within a distribution are not tracked.
+            return False
     existing_jobs = find_waiting_jobs(
         derived_series, sourcepackagename, parent_series)
     return len(existing_jobs) == 0
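The rewritten may_require_job() guard above asks IDistroSeriesParentSet for the registered parents and refuses to create a job when there are none, or when any parent lives in the same distribution, before finally checking for an identical waiting job. A self-contained toy version of that decision, with simple stand-in classes rather than the Launchpad models:

class Series(object):
    # Toy stand-in for a DistroSeries: only the owning distribution matters.
    def __init__(self, distribution):
        self.distribution = distribution


def may_require_job(derived_series, parents, waiting_jobs):
    # Mirrors the added logic: no series, no parents, or an intra-distribution
    # parent all mean "no job"; otherwise create one only if none is waiting.
    if derived_series is None:
        return False
    if not parents:
        return False
    for parent in parents:
        if parent.distribution == derived_series.distribution:
            # Differences within a distribution are not tracked.
            return False
    return len(waiting_jobs) == 0


ubuntu = object()
derivative = object()
print(may_require_job(Series(derivative), [Series(ubuntu)], waiting_jobs=[]))  # True
print(may_require_job(Series(ubuntu), [Series(ubuntu)], waiting_jobs=[]))      # False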
@@ -199,45 +196,31 @@
         """See `IDistroSeriesDifferenceJobSource`."""
         if not getFeatureFlag(FEATURE_FLAG_ENABLE_MODULE):
             return
-
         # -backports and -proposed are not really part of a standard
         # distribution's packages so we're ignoring them here.  They can
         # always be manually synced by the users if necessary, in the
         # rare occasions that they require them.
-        ignored_pockets = [
+        if pocket in (
             PackagePublishingPocket.BACKPORTS,
-            PackagePublishingPocket.PROPOSED,
-            ]
-        if pocket in ignored_pockets:
+            PackagePublishingPocket.PROPOSED):
             return
-
-        # Create jobs for DSDs between the derived_series' parents and
-        # the derived_series itself.
-        parent_series_jobs = [
-            create_job(derived_series, sourcepackagename, parent)
-            for parent in derived_series.getParentSeries()
-                if may_require_job(derived_series, sourcepackagename, parent)]
-
+        jobs = []
+        parent_series = derived_series.getParentSeries()
+        # Create jobs for DSDs between the derived_series and its
+        # parents.
+        for parent in parent_series:
+            if may_require_job(
+                derived_series, sourcepackagename, parent):
+                jobs.append(create_job(
+                    derived_series, sourcepackagename, parent))
         # Create jobs for DSDs between the derived_series and its
         # children.
-        derived_series_jobs = [
-            create_job(child, sourcepackagename, derived_series)
-            for child in derived_series.getDerivedSeries()
-                if may_require_job(child, sourcepackagename, derived_series)]
-
-        return parent_series_jobs + derived_series_jobs
-
-    @classmethod
-    def createForSPPHs(cls, spphs):
-        """See `IDistroSeriesDifferenceJobSource`."""
-        # XXX JeroenVermeulen 2011-08-25, bug=834499: This won't do for
-        # some of the mass deletions we're planning to support.
-        # Optimize.
-        for spph in spphs:
-            if spph.archive.is_main:
-                cls.createForPackagePublication(
-                    spph.distroseries,
-                    spph.sourcepackagerelease.sourcepackagename, spph.pocket)
+        for child in derived_series.getDerivedSeries():
+            if may_require_job(
+                child, sourcepackagename, derived_series):
+                jobs.append(create_job(
+                    child, sourcepackagename, derived_series))
+        return jobs
 
     @classmethod
     def massCreateForSeries(cls, derived_series):
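Functionally, the removed list-comprehension version and the added loop version of createForPackagePublication build the same job list: one job per qualifying parent of the derived series, then one per qualifying child. A quick sketch with toy stand-in functions showing that the two shapes are equivalent:

def create_job(derived, sourcepackagename, parent):
    # Toy stand-in: just record which job would be created.
    return (derived, sourcepackagename, parent)


def may_require_job(derived, sourcepackagename, parent):
    # Toy stand-in: pretend every combination needs a job.
    return True


def comprehension_style(derived, parents, children, name):
    parent_jobs = [
        create_job(derived, name, parent)
        for parent in parents if may_require_job(derived, name, parent)]
    child_jobs = [
        create_job(child, name, derived)
        for child in children if may_require_job(child, name, derived)]
    return parent_jobs + child_jobs


def loop_style(derived, parents, children, name):
    jobs = []
    for parent in parents:
        if may_require_job(derived, name, parent):
            jobs.append(create_job(derived, name, parent))
    for child in children:
        if may_require_job(child, name, derived):
            jobs.append(create_job(child, name, derived))
    return jobs


assert (comprehension_style('hoary', ['warty'], ['grumpy'], 'pkg') ==
        loop_style('hoary', ['warty'], ['grumpy'], 'pkg'))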
@@ -272,18 +255,6 @@
                 jobs_by_dsd.setdefault(dsd, []).append(cls(job))
         return jobs_by_dsd
 
-    def __repr__(self):
-        """Returns an informative representation of the job."""
-        parts = "%s for " % self.__class__.__name__
-        name = self.sourcepackagename
-        if not name:
-            parts += "no package name (!)"
-        else:
-            parts += "package %s" % name
-        parts += " from %s to %s" % (self.parent_series.name,
-                                     self.derived_series.name)
-        return "<%s>" % parts
-
     @property
     def sourcepackagename(self):
         return SourcePackageName.get(self.metadata['sourcepackagename'])
@@ -344,7 +315,7 @@
 
         ds_diff = self.getMatchingDSD()
         if ds_diff is None:
-            getUtility(IDistroSeriesDifferenceSource).new(
+            ds_diff = getUtility(IDistroSeriesDifferenceSource).new(
                 self.distroseries, self.sourcepackagename, self.parent_series)
         else:
             ds_diff.update()