1
# Copyright 2009 Canonical Ltd. This software is licensed under the
2
# GNU Affero General Public License version 3 (see the file LICENSE).
4
# pylint: disable-msg=E0211,E0213
7
"""Common build base classes."""
10
from __future__ import with_statement
15
'handle_status_for_build',
19
from cStringIO import StringIO
26
from storm.store import Store
27
from zope.component import getUtility
28
from zope.security.proxy import removeSecurityProxy
30
from canonical.config import config
31
from canonical.database.sqlbase import (
32
clear_current_connection_cache,
34
flush_database_updates,
36
from canonical.launchpad.helpers import filenameToContentType
37
from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
38
from canonical.librarian.utils import copy_and_close
39
from lp.buildmaster.interfaces.buildbase import (
40
BUILDD_MANAGER_LOG_NAME,
43
from lp.buildmaster.model.buildqueue import BuildQueue
44
from lp.registry.interfaces.pocket import pocketsuffix
47
# Name of the log file written by the uploader (process-upload) inside the
# upload directory; also the name searched for by getUploadLogContent().
UPLOAD_LOG_FILENAME = 'uploader.log'
def handle_status_for_build(build, status, librarian, slave_status,
                            build_class=None):
    """Find and call the correct method for handling the build status.

    This is extracted from build base so that the implementation
    can be shared by the newer IPackageBuild class.

    :param build: The build whose status is being handled.
    :param status: Status string reported by the slave (e.g. 'OK',
        'PACKAGEFAIL'); used to look up a ``_handleStatus_<status>``
        method on `build_class`.
    :param librarian: Librarian used to store retrieved files.
    :param slave_status: Dict of status information from the slave.
    :param build_class: Class providing the handler methods; defaults
        to `BuildBase`.
    """
    logger = logging.getLogger(BUILDD_MANAGER_LOG_NAME)
    if build_class is None:
        build_class = BuildBase
    method = getattr(build_class, '_handleStatus_' + status, None)
    if method is None:
        # Unknown status: log loudly and bail out rather than crash.
        logger.critical("Unknown BuildStatus '%s' for builder '%s'"
                        % (status, build.buildqueue_record.builder.url))
        return
    method(build, librarian, slave_status, logger)
"""A mixin class providing functionality for farm jobs that build a
75
Note: this class does not implement IBuildBase as we currently duplicate
76
the properties defined on IBuildBase on the inheriting class tables.
77
BuildBase cannot therefore implement IBuildBase itself, as storm requires
78
that the corresponding __storm_table__ be defined for the class. Instead,
79
the classes using the BuildBase mixin must ensure that they implement
82
policy_name = 'buildd'
85
def getUploadDirLeaf(build_cookie, now=None):
    """See `IPackageBuild`.

    :param build_cookie: Unique cookie string identifying this build.
    :param now: Timestamp to embed in the name; defaults to the
        current local time.
    :return: Leaf directory name of the form <TIMESTAMP>-<BUILD-COOKIE>.
    """
    # UPLOAD_LEAF: <TIMESTAMP>-<BUILD-COOKIE>
    if now is None:
        now = datetime.datetime.now()
    return '%s-%s' % (now.strftime("%Y%m%d-%H%M%S"), build_cookie)
def getUploadDir(upload_leaf):
    """Return the directory that things will be stored in.

    :param upload_leaf: Leaf directory name, as produced by
        `getUploadDirLeaf`.
    :return: Path under <builddmaster.root>/incoming for this upload.
    """
    return os.path.join(config.builddmaster.root, 'incoming', upload_leaf)
def getUploaderCommand(build, upload_leaf, uploader_logfilename):
    """See `IBuildBase`.

    Assemble the process-upload command line for this build.

    :param build: The build whose result files are to be uploaded.
    :param upload_leaf: Leaf directory holding the files to upload.
    :param uploader_logfilename: Path of the log file the uploader
        should write.
    :return: The uploader command as a list of argument strings.
    """
    root = os.path.abspath(config.builddmaster.root)
    uploader_command = list(config.builddmaster.uploader.split())

    # add extra arguments for processing a binary upload
    extra_args = [
        "--log-file", "%s" % uploader_logfilename,
        "-d", "%s" % build.distribution.name,
        "-s", "%s" % (build.distro_series.name +
                      pocketsuffix[build.pocket]),
        "-b", "%s" % build.id,
        "-J", "%s" % upload_leaf,
        '--context=%s' % build.policy_name,
        # NOTE(review): trailing positional argument reconstructed as the
        # buildmaster root (the directory the uploader is invoked on) --
        # confirm against the uploader's expected command line.
        "%s" % root,
        ]

    uploader_command.extend(extra_args)
    return uploader_command
def _getProxiedFileURL(self, library_file):
    """Return the 'http_url' of a `ProxiedLibraryFileAlias`.

    :param library_file: The library file alias to expose through the
        proxied-librarian URL scheme.
    """
    # Avoiding circular imports.
    from canonical.launchpad.browser.librarian import (
        ProxiedLibraryFileAlias)

    proxied_file = ProxiedLibraryFileAlias(library_file, self)
    return proxied_file.http_url
def build_log_url(self):
    """See `IBuildBase`.

    :return: Proxied URL of the build log, or None when no log has
        been stored yet.
    """
    if self.buildlog is None:
        return None
    return self._getProxiedFileURL(self.buildlog)
def getUploadLogContent(root, leaf):
    """Retrieve the upload log contents.

    :param root: Root directory for the uploads
    :param leaf: Leaf for this particular upload
    :return: Contents of log file or message saying no log file was found.
    """
    # Retrieve log file content. The uploader moves the upload directory
    # into one of these outcome directories, so probe each in turn.
    possible_locations = (
        'failed', 'failed-to-move', 'rejected', 'accepted')
    for location_dir in possible_locations:
        log_filepath = os.path.join(root, location_dir, leaf,
                                    UPLOAD_LOG_FILENAME)
        if os.path.exists(log_filepath):
            with open(log_filepath, 'r') as uploader_log_file:
                return uploader_log_file.read()
    return 'Could not find upload log file'
def upload_log_url(self):
    """See `IBuildBase`.

    :return: Proxied URL of the upload log, or None when no upload log
        has been stored.
    """
    if self.upload_log is None:
        return None
    return self._getProxiedFileURL(self.upload_log)
def handleStatus(self, status, librarian, slave_status):
    """See `IBuildBase`.

    Delegate to `handle_status_for_build`, dispatching on this
    instance's concrete class so subclass handler methods are used.
    """
    return handle_status_for_build(
        self, status, librarian, slave_status, self.__class__)
def _handleStatus_OK(build, librarian, slave_status, logger):
    """Handle a package that built successfully.

    Once built successfully, we pull the files, store them in a
    directory, store build information and push them through the
    uploader.

    :param build: The build that completed successfully.
    :param librarian: Librarian used for storing logs.
    :param slave_status: Dict from the slave; 'filemap' maps result
        filenames to slave file references.
    :param logger: Logger for progress/diagnostic messages.
    """
    filemap = slave_status['filemap']

    logger.info("Processing successful build %s from builder %s" % (
        build.buildqueue_record.specific_job.build.title,
        build.buildqueue_record.builder.name))
    # Explode before collect a binary that is denied in this
    # distroseries/pocket
    if not build.archive.allowUpdatesToReleasePocket():
        assert build.distro_series.canUploadToPocket(build.pocket), (
            "%s (%s) can not be built for pocket %s: illegal status"
            % (build.title, build.id, build.pocket.name))

    # ensure we have the correct build root as:
    # <BUILDMASTER_ROOT>/incoming/<UPLOAD_LEAF>/<TARGET_PATH>/[FILES]
    root = os.path.abspath(config.builddmaster.root)

    # create a single directory to store build result files
    upload_leaf = build.getUploadDirLeaf(
        '%s-%s' % (build.id, build.buildqueue_record.id))
    upload_dir = build.getUploadDir(upload_leaf)
    logger.debug("Storing build result at '%s'" % upload_dir)

    # Build the right UPLOAD_PATH so the distribution and archive
    # can be correctly found during the upload:
    #   <archive_id>/distribution_name
    # for all destination archive types.
    archive = build.archive
    distribution_name = build.distribution.name
    target_path = '%s/%s' % (archive.id, distribution_name)
    upload_path = os.path.join(upload_dir, target_path)
    os.makedirs(upload_path)

    slave = removeSecurityProxy(build.buildqueue_record.builder.slave)
    successful_copy_from_slave = True
    for filename in filemap:
        logger.info("Grabbing file: %s" % filename)
        out_file_name = os.path.join(upload_path, filename)
        # If the evaluated output file name is not within our
        # upload path, then we don't try to copy this or any
        # subsequent files: the slave is not to be trusted with
        # path traversal.
        if not os.path.realpath(out_file_name).startswith(upload_path):
            successful_copy_from_slave = False
            logger.warning(
                "A slave tried to upload the file '%s' "
                "for the build %d." % (filename, build.id))
            break
        out_file = open(out_file_name, "wb")
        slave_file = slave.getFile(filemap[filename])
        copy_and_close(slave_file, out_file)

    # We only attempt the upload if we successfully copied all the
    # files from the slave.
    if successful_copy_from_slave:
        uploader_logfilename = os.path.join(
            upload_dir, UPLOAD_LOG_FILENAME)
        uploader_command = build.getUploaderCommand(
            build, upload_leaf, uploader_logfilename)
        logger.debug("Saving uploader log at '%s'" % uploader_logfilename)

        logger.info("Invoking uploader on %s" % root)
        logger.info("%s" % uploader_command)

        uploader_process = subprocess.Popen(
            uploader_command, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)

        # Nothing should be written to the stdout/stderr.
        upload_stdout, upload_stderr = uploader_process.communicate()

        # XXX cprov 2007-04-17: we do not check uploader_result_code
        # anywhere. We need to find out what will be best strategy
        # when it failed HARD (there is a huge effort in process-upload
        # to not return error, it only happen when the code is broken).
        uploader_result_code = uploader_process.returncode
        logger.info("Uploader returned %d" % uploader_result_code)

    # Quick and dirty hack to carry on on process-upload failures
    if os.path.exists(upload_dir):
        logger.warning("The upload directory did not get moved.")
        failed_dir = os.path.join(root, "failed-to-move")
        if not os.path.exists(failed_dir):
            os.mkdir(failed_dir)
        os.rename(upload_dir, os.path.join(failed_dir, upload_leaf))

    # The famous 'flush_updates + clear_cache' will make visible
    # the DB changes done in process-upload, considering that the
    # transaction was set with ISOLATION_LEVEL_READ_COMMITED
    # isolation level.
    # NOTE(review): `cursor` is expected to come from
    # canonical.database.sqlbase -- confirm it is imported at the top
    # of the file.
    cur = cursor()
    cur.execute('SHOW transaction_isolation')
    isolation_str = cur.fetchone()[0]
    assert isolation_str == 'read committed', (
        'BuildMaster/BuilderGroup transaction isolation should be '
        'ISOLATION_LEVEL_READ_COMMITTED (not "%s")' % isolation_str)

    original_slave = build.buildqueue_record.builder.slave

    # XXX Robert Collins, Celso Providelo 2007-05-26 bug=506256:
    # 'Refreshing' objects procedure is forced on us by using a
    # different process to do the upload, but as that process runs
    # in the same unix account, it is simply double handling and we
    # would be better off to do it within this process.
    flush_database_updates()
    clear_current_connection_cache()

    # XXX cprov 2007-06-15: Re-issuing removeSecurityProxy is forced on
    # us by sqlobject refreshing the builder object during the
    # transaction cache clearing. Once we sort the previous problem
    # this step should probably not be required anymore.
    build.buildqueue_record.builder.setSlaveForTesting(
        removeSecurityProxy(original_slave))

    # Store build information; the build record was already updated
    # during the binary upload.
    build.storeBuildInfo(build, librarian, slave_status)

    # Retrive the up-to-date build record and perform consistency
    # checks. The build record should be updated during the binary
    # upload processing, if it wasn't something is broken and needs
    # admins attention. Even when we have a FULLYBUILT build record,
    # if it is not related with at least one binary, there is also
    # a problem.
    # For both situations we will mark the builder as FAILEDTOUPLOAD
    # and the and update the build details (datebuilt, duration,
    # buildlog, builder) in LP. A build-failure-notification will be
    # sent to the lp-build-admin celebrity and to the sourcepackagerelease
    # uploader about this occurrence. The failure notification will
    # also contain the information required to manually reprocess the
    # binary upload when it was the case.
    if (build.status != BuildStatus.FULLYBUILT or
        not successful_copy_from_slave or
        not build.verifySuccessfulUpload()):
        logger.warning("Build %s upload failed." % build.id)
        build.status = BuildStatus.FAILEDTOUPLOAD
        uploader_log_content = build.getUploadLogContent(root,
            upload_leaf)
        # Store the upload_log_contents in librarian so it can be
        # accessed by anyone with permission to see the build.
        build.storeUploadLog(uploader_log_content)
        # Notify the build failure.
        build.notify(extra_info=uploader_log_content)
    else:
        logger.info(
            "Gathered %s %d completely" % (
            build.__class__.__name__, build.id))

    # Release the builder for another job.
    build.buildqueue_record.builder.cleanSlave()
    # Remove BuildQueue record.
    build.buildqueue_record.destroySelf()
def _handleStatus_PACKAGEFAIL(build, librarian, slave_status, logger):
    """Handle a package that had failed to build.

    Build has failed when trying to work with the target package: set
    the job status to FAILEDTOBUILD, store available info and remove
    the BuildQueue entry.
    """
    build.status = BuildStatus.FAILEDTOBUILD
    build.storeBuildInfo(build, librarian, slave_status)
    build.buildqueue_record.builder.cleanSlave()
    build.buildqueue_record.destroySelf()
def _handleStatus_DEPFAIL(build, librarian, slave_status, logger):
    """Handle a package that had missing dependencies.

    Build has failed by missing dependencies: set the job status to
    MANUALDEPWAIT, store available information, remove the BuildQueue
    entry and release the builder slave for another job.
    """
    build.status = BuildStatus.MANUALDEPWAIT
    build.storeBuildInfo(build, librarian, slave_status)
    logger.critical("***** %s is MANUALDEPWAIT *****"
                    % build.buildqueue_record.builder.name)
    build.buildqueue_record.builder.cleanSlave()
    build.buildqueue_record.destroySelf()
def _handleStatus_CHROOTFAIL(build, librarian, slave_status,
                             logger):
    """Handle a package that had failed when unpacking the CHROOT.

    Build has failed when installing the current CHROOT: mark the
    job as CHROOTWAIT (the code sets CHROOTWAIT, not CHROOTFAIL),
    store available information, remove the BuildQueue entry and
    release the builder.
    """
    build.status = BuildStatus.CHROOTWAIT
    build.storeBuildInfo(build, librarian, slave_status)
    logger.critical("***** %s is CHROOTWAIT *****" %
                    build.buildqueue_record.builder.name)
    build.buildqueue_record.builder.cleanSlave()
    build.buildqueue_record.destroySelf()
def _handleStatus_BUILDERFAIL(build, librarian, slave_status, logger):
    """Handle builder failures.

    The builder, not the package, failed: mark the builder itself as
    failed (so it takes no further jobs) and reset the BuildQueue
    entry so the job will be dispatched again elsewhere.
    """
    logger.warning("***** %s has failed *****"
                   % build.buildqueue_record.builder.name)
    build.buildqueue_record.builder.failBuilder(
        "Builder returned BUILDERFAIL when asked for its status")
    # Simply reset the job: store what info we have, then requeue.
    build.storeBuildInfo(build, librarian, slave_status)
    build.buildqueue_record.reset()
def _handleStatus_GIVENBACK(build, librarian, slave_status, logger):
    """Handle automatic retry requested by builder.

    GIVENBACK pseudo-state represents a request for automatic retry
    later: the build record is reset so the job will be dispatched
    again.
    """
    logger.warning("***** %s is GIVENBACK by %s *****"
                   % (build.buildqueue_record.specific_job.build.title,
                      build.buildqueue_record.builder.name))
    build.storeBuildInfo(build, librarian, slave_status)
    # XXX cprov 2006-05-30: Currently this information is not
    # properly presented in the Web UI. We will discuss it in
    # the next Paris Summit, infinity has some ideas about how
    # to use this content. For now we just ensure it's stored.
    build.buildqueue_record.builder.cleanSlave()
    build.buildqueue_record.reset()
def getLogFromSlave(build):
    """See `IBuildBase`.

    Transfer the slave's build log into the librarian and return the
    resulting library file.
    """
    # NOTE(review): final argument reconstructed as build.is_private
    # (restricting the log for private builds, matching createUploadLog)
    # -- confirm against transferSlaveFileToLibrarian's signature.
    return build.buildqueue_record.builder.transferSlaveFileToLibrarian(
        'buildlog', build.buildqueue_record.getLogFileName(),
        build.is_private)
def storeBuildInfo(build, librarian, slave_status):
    """See `IBuildBase`.

    Store the build log, builder and finishing timestamp on the build
    record, plus the dependency string (if any) reported by the slave.
    """
    # XXX michaeln 2010-05-05 bug=567922
    # As this method is temporarily static until BuildBase is
    # removed and the implementation moved to PackageBuild,
    # self.attr_name is temporarily build.attr_name, which
    # means we cannot set the build attributes.
    naked_build = removeSecurityProxy(build)
    naked_build.log = build.getLogFromSlave(build)
    naked_build.builder = build.buildqueue_record.builder
    # XXX cprov 20060615 bug=120584: Currently buildduration includes
    # the scanner latency, it should really be asking the slave for
    # the duration spent building locally.
    naked_build.date_finished = datetime.datetime.now(pytz.UTC)
    if slave_status.get('dependencies') is not None:
        build.dependencies = unicode(slave_status.get('dependencies'))
    else:
        build.dependencies = None
def createUploadLog(build, content, filename=None):
    """Creates a file on the librarian for the upload log.

    :param build: The build the log belongs to.
    :param content: Text content of the upload log.
    :param filename: Librarian filename; defaults to
        'upload_<build.id>_log.txt'.
    :return: ILibraryFileAlias for the upload log file.
    """
    # The given content is stored in the librarian, restricted as
    # necessary according to the targeted archive's privacy. The content
    # object's 'upload_log' attribute will point to the
    # `LibrarianFileAlias`.
    assert build.upload_log is None, (
        "Upload log information already exists and cannot be overridden.")

    if filename is None:
        filename = 'upload_%s_log.txt' % build.id
    contentType = filenameToContentType(filename)
    file_size = len(content)
    file_content = StringIO(content)
    restricted = build.is_private

    return getUtility(ILibraryFileAliasSet).create(
        filename, file_size, file_content, contentType=contentType,
        restricted=restricted)
def storeUploadLog(self, content):
    """See `IBuildBase`.

    Store `content` in the librarian via `createUploadLog` and record
    the resulting library file as this build's upload log.
    """
    library_file = self.createUploadLog(self, content)
    self.upload_log = library_file
def queueBuild(build, suspended=False):
465
"""See `IBuildBase`"""
466
specific_job = build.makeJob()
468
# This build queue job is to be created in a suspended state.
470
specific_job.job.suspend()
472
duration_estimate = build.estimateDuration()
473
queue_entry = BuildQueue(
474
estimated_duration=duration_estimate,
475
job_type=build.build_farm_job_type,
476
job=specific_job.job, processor=specific_job.processor,
477
virtualized=specific_job.virtualized)
478
Store.of(build).add(queue_entry)