launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #00868
[Merge] lp:~jelmer/launchpad/506256-remove-popen into lp:launchpad/devel
Jelmer Vernooij has proposed merging lp:~jelmer/launchpad/506256-remove-popen into lp:launchpad/devel.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers): code
buildd-manager currently invokes the uploadprocessor on binaries that it has fetched from the buildd slaves.
As this process is synchronous it blocks the buildd manager from doing other things at the same time - such as scheduling new builds - time during which the buildd slaves are idling.
This branch changes the buildd manager to move build results out of the way into a queue that can independently be processed by process-upload (extensions for process-upload to support this were landed earlier).
Tests:
./bin/test lp.archiveuploader
./bin/test lp.buildmaster
There is still a single test testNoFiles() in the archiveuploader that is itself buggy. I'm looking into this at the moment, but wanted to submit the branch for review earlier because of the upcoming PQM closure. The fix for this test shouldn't involve any changes outside of the test itself.
QA:
This branch has been running on dogfood for the past week or so in its current form and has been working well. We've tested with multiple buildds and thrown several hundred builds at it.
Deployment:
A cron job needs to be set up to run the following command regularly:
LPCURRENT/scripts/process-upload.py -C buildd --builds -v $QUEUE
--
https://code.launchpad.net/~jelmer/launchpad/506256-remove-popen/+merge/34549
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jelmer/launchpad/506256-remove-popen into lp:launchpad/devel.
=== modified file 'lib/canonical/launchpad/webapp/tales.py'
--- lib/canonical/launchpad/webapp/tales.py 2010-08-27 11:19:54 +0000
+++ lib/canonical/launchpad/webapp/tales.py 2010-09-03 14:49:06 +0000
@@ -995,6 +995,7 @@
BuildStatus.CHROOTWAIT: {'src': "/@@/build-chrootwait"},
BuildStatus.SUPERSEDED: {'src': "/@@/build-superseded"},
BuildStatus.BUILDING: {'src': "/@@/processing"},
+ BuildStatus.UPLOADING: {'src': "/@@/processing"},
BuildStatus.FAILEDTOUPLOAD: {'src': "/@@/build-failedtoupload"},
}
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-27 11:19:54 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-03 14:49:06 +0000
@@ -31,6 +31,7 @@
from canonical.launchpad.testing.fakepackager import FakePackager
from canonical.launchpad.webapp.errorlog import ErrorReportingUtility
from canonical.testing import LaunchpadZopelessLayer
+
from lp.app.errors import NotFoundError
from lp.archiveuploader.uploadpolicy import (
AbstractUploadPolicy,
@@ -1861,17 +1862,24 @@
self.uploadprocessor = self.setupBreezyAndGetUploadProcessor()
def testInvalidLeafName(self):
+ # Directories with invalid leaf names should be skipped,
+ # and a warning logged.
upload_dir = self.queueUpload("bar_1.0-1")
self.uploadprocessor.processBuildUpload(upload_dir, "bar_1.0-1")
self.assertLogContains('Unable to extract build id from leaf '
'name bar_1.0-1, skipping.')
def testNoBuildEntry(self):
+ # Directories that refer to a nonexistent build
+ # should be skipped and a warning logged.
upload_dir = self.queueUpload("bar_1.0-1", queue_entry="42-60")
- self.assertRaises(NotFoundError, self.uploadprocessor.processBuildUpload,
- upload_dir, "42-60")
+ self.uploadprocessor.processBuildUpload(upload_dir, "42-60")
+ self.assertLogContains("Unable to find package build with id 42. Skipping.")
def testNoFiles(self):
+ # If the upload directory is empty, the upload
+ # will fail.
+
# Upload a source package
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(self.uploadprocessor, upload_dir)
@@ -1887,16 +1895,19 @@
build.jobStarted()
build.builder = self.factory.makeBuilder()
+ build.status = BuildStatus.UPLOADING
+
# Upload and accept a binary for the primary archive source.
shutil.rmtree(upload_dir)
self.layer.txn.commit()
- leaf_name = "%d-%d" % (build.id, 60)
+ leaf_name = build.getUploadDirLeaf("%d-60" % build.package_build.id)
os.mkdir(os.path.join(self.incoming_folder, leaf_name))
self.options.context = 'buildd'
self.options.builds = True
self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name)
self.layer.txn.commit()
- self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
+ self.assertEquals(
+ BuildStatus.FAILEDTOUPLOAD, build.status)
log_contents = build.upload_log.read()
self.assertTrue('ERROR: Exception while processing upload '
in log_contents)
@@ -1904,6 +1915,8 @@
in log_contents)
def testSuccess(self):
+ # Properly uploaded binaries should result in the
+ # build status changing to FULLYBUILT.
# Upload a source package
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(self.uploadprocessor, upload_dir)
@@ -1919,10 +1932,12 @@
build.jobStarted()
build.builder = self.factory.makeBuilder()
+ build.status = BuildStatus.UPLOADING
+
# Upload and accept a binary for the primary archive source.
shutil.rmtree(upload_dir)
self.layer.txn.commit()
- leaf_name = "%d-%d" % (build.id, 60)
+ leaf_name = build.getUploadDirLeaf("%d-60" % build.package_build.id)
upload_dir = self.queueUpload("bar_1.0-1_binary",
queue_entry=leaf_name)
self.options.context = 'buildd'
@@ -1930,7 +1945,8 @@
self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name)
self.layer.txn.commit()
self.assertEquals(BuildStatus.FULLYBUILT, build.status)
- log_lines = build.upload_log.read().splitlines()
+ log_contents = build.upload_log.read()
+ log_lines = log_contents.splitlines()
self.assertTrue(
'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
self.assertTrue(
@@ -1942,10 +1958,12 @@
"""Tests for parse_build_upload_leaf_name."""
def test_valid(self):
- self.assertEquals((42, 60), parse_build_upload_leaf_name("42-60"))
+ self.assertEquals(
+ (42, 60), parse_build_upload_leaf_name("20100812-300-42-60"))
def test_invalid_chars(self):
- self.assertRaises(ValueError, parse_build_upload_leaf_name, "a42-460")
+ self.assertRaises(
+ ValueError, parse_build_upload_leaf_name, "aaba-a42-460")
def test_no_dash(self):
self.assertRaises(ValueError, parse_build_upload_leaf_name, "32")
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2010-08-27 11:19:54 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-03 14:49:06 +0000
@@ -81,7 +81,7 @@
IArchiveSet,
NoSuchPPA,
)
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
+from lp.buildmaster.interfaces.packagebuild import IPackageBuildSet
__all__ = [
@@ -106,7 +106,7 @@
:param name: Directory name.
:return: Tuple with build id and build queue record id.
"""
- (build_id_str, queue_record_str) = name.split("-", 1)
+ (build_id_str, queue_record_str) = name.split("-")[-2:]
try:
return int(build_id_str), int(queue_record_str)
except TypeError:
@@ -212,7 +212,17 @@
self.log.warn("Unable to extract build id from leaf name %s,"
" skipping." % upload)
return
- build = getUtility(IBinaryPackageBuildSet).getByBuildID(int(build_id))
+ build = getUtility(IPackageBuildSet).getByID(build_id)
+ if build is None:
+ self.log.warn(
+ "Unable to find package build with id %d. Skipping." %
+ build_id)
+ return
+ if build.status != BuildStatus.UPLOADING:
+ self.log.warn(
+ "Expected build status to be 'UPLOADING', was %s. Skipping.",
+ build.status.name)
+ return
self.log.debug("Build %s found" % build.id)
logger = BufferLogger()
upload_path = os.path.join(fsroot, upload)
=== modified file 'lib/lp/buildmaster/enums.py'
--- lib/lp/buildmaster/enums.py 2010-08-27 15:03:18 +0000
+++ lib/lp/buildmaster/enums.py 2010-09-03 14:49:06 +0000
@@ -97,6 +97,13 @@
will be notified via process-upload about the reason of the rejection.
""")
+ UPLOADING = DBItem(8, """
+ Uploading build
+
+ The build has completed and is waiting to be processed by the
+ upload processor.
+ """)
+
class BuildFarmJobType(DBEnumeratedType):
"""Soyuz build farm job type.
@@ -128,6 +135,3 @@
Generate translation templates from a bazaar branch.
""")
-
-
-
=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
--- lib/lp/buildmaster/interfaces/packagebuild.py 2010-08-27 11:19:54 +0000
+++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-03 14:49:06 +0000
@@ -91,13 +91,6 @@
title=_("Distribution series"), required=True,
description=_("Shortcut for its distribution series.")))
- def getUploaderCommand(package_build, upload_leaf, uploader_logfilename):
- """Get the command to run as the uploader.
-
- :return: A list of command line arguments, beginning with the
- executable.
- """
-
def getUploadDirLeaf(build_cookie, now=None):
"""Return the directory-leaf where files to be uploaded are stored.
@@ -106,24 +99,13 @@
directory name. If not provided, defaults to now.
"""
- def getUploadDir(upload_leaf):
- """Return the full directory where files to be uploaded are stored.
-
- :param upload_leaf: The leaf directory name where things will be
- stored.
+ def getBuildCookie():
+ """Return the build cookie (build id and build queue record id).
"""
def getLogFromSlave(build):
"""Get last buildlog from slave. """
- def getUploadLogContent(root, leaf):
- """Retrieve the upload log contents.
-
- :param root: Root directory for the uploads
- :param leaf: Leaf for this particular upload
- :return: Contents of log file or message saying no log file was found.
- """
-
def estimateDuration():
"""Estimate the build duration."""
@@ -200,3 +182,9 @@
will be returned.
:return: a `ResultSet` representing the requested package builds.
"""
+ def getByID(build_id):
+ """Get a `IPackageBuild` by build id.
+
+ :param build_id: Build id.
+ :return: The `IPackageBuild`.
+ """
=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
--- lib/lp/buildmaster/model/packagebuild.py 2010-08-30 13:58:15 +0000
+++ lib/lp/buildmaster/model/packagebuild.py 2010-09-03 14:49:06 +0000
@@ -14,7 +14,6 @@
import datetime
import logging
import os.path
-import subprocess
from cStringIO import StringIO
from lazr.delegates import delegates
@@ -36,11 +35,6 @@
from canonical.config import config
from canonical.database.enumcol import DBEnum
-from canonical.database.sqlbase import (
- clear_current_connection_cache,
- cursor,
- flush_database_updates,
- )
from canonical.launchpad.browser.librarian import ProxiedLibraryFileAlias
from canonical.launchpad.helpers import filenameToContentType
from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
@@ -73,7 +67,6 @@
SLAVE_LOG_FILENAME = 'buildlog'
-UPLOAD_LOG_FILENAME = 'uploader.log'
class PackageBuild(BuildFarmJobDerived, Storm):
@@ -164,30 +157,9 @@
timestamp = now.strftime("%Y%m%d-%H%M%S")
return '%s-%s' % (timestamp, build_cookie)
- def getUploadDir(self, upload_leaf):
- """See `IPackageBuild`."""
- return os.path.join(config.builddmaster.root, 'incoming', upload_leaf)
-
- @staticmethod
- def getUploaderCommand(package_build, upload_leaf, upload_logfilename):
- """See `IPackageBuild`."""
- root = os.path.abspath(config.builddmaster.root)
- uploader_command = list(config.builddmaster.uploader.split())
-
- # Add extra arguments for processing a package upload.
- extra_args = [
- "--log-file", "%s" % upload_logfilename,
- "-d", "%s" % package_build.distribution.name,
- "-s", "%s" % (
- package_build.distro_series.getSuite(package_build.pocket)),
- "-b", "%s" % package_build.id,
- "-J", "%s" % upload_leaf,
- '--context=%s' % package_build.policy_name,
- "%s" % root,
- ]
-
- uploader_command.extend(extra_args)
- return uploader_command
+ def getBuildCookie(self):
+ """See `IPackageBuild`."""
+ return '%s-%s' % (self.id, self.buildqueue_record.id)
@staticmethod
def getLogFromSlave(package_build):
@@ -198,26 +170,6 @@
package_build.buildqueue_record.getLogFileName(),
package_build.is_private)
- @staticmethod
- def getUploadLogContent(root, leaf):
- """Retrieve the upload log contents.
-
- :param root: Root directory for the uploads
- :param leaf: Leaf for this particular upload
- :return: Contents of log file or message saying no log file was found.
- """
- # Retrieve log file content.
- possible_locations = (
- 'failed', 'failed-to-move', 'rejected', 'accepted')
- for location_dir in possible_locations:
- log_filepath = os.path.join(root, location_dir, leaf,
- UPLOAD_LOG_FILENAME)
- if os.path.exists(log_filepath):
- with open(log_filepath, 'r') as uploader_log_file:
- return uploader_log_file.read()
- else:
- return 'Could not find upload log file'
-
def estimateDuration(self):
"""See `IPackageBuild`."""
raise NotImplementedError
@@ -346,19 +298,16 @@
root = os.path.abspath(config.builddmaster.root)
# Create a single directory to store build result files.
- upload_leaf = self.getUploadDirLeaf(
- '%s-%s' % (self.id, self.buildqueue_record.id))
- upload_dir = self.getUploadDir(upload_leaf)
- logger.debug("Storing build result at '%s'" % upload_dir)
+ upload_leaf = self.getUploadDirLeaf(self.getBuildCookie())
+ grab_dir = os.path.join(root, "grabbing", upload_leaf)
+ logger.debug("Storing build result at '%s'" % grab_dir)
# Build the right UPLOAD_PATH so the distribution and archive
# can be correctly found during the upload:
# <archive_id>/distribution_name
# for all destination archive types.
- archive = self.archive
- distribution_name = self.distribution.name
- target_path = '%s/%s' % (archive.id, distribution_name)
- upload_path = os.path.join(upload_dir, target_path)
+ upload_path = os.path.join(
+ grab_dir, str(self.archive.id), self.distribution.name)
os.makedirs(upload_path)
slave = removeSecurityProxy(self.buildqueue_record.builder.slave)
@@ -382,101 +331,33 @@
# We only attempt the upload if we successfully copied all the
# files from the slave.
if successful_copy_from_slave:
- uploader_logfilename = os.path.join(
- upload_dir, UPLOAD_LOG_FILENAME)
- uploader_command = self.getUploaderCommand(
- self, upload_leaf, uploader_logfilename)
- logger.debug("Saving uploader log at '%s'" % uploader_logfilename)
-
- logger.info("Invoking uploader on %s" % root)
- logger.info("%s" % uploader_command)
-
- uploader_process = subprocess.Popen(
- uploader_command, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
-
- # Nothing should be written to the stdout/stderr.
- upload_stdout, upload_stderr = uploader_process.communicate()
-
- # XXX cprov 2007-04-17: we do not check uploader_result_code
- # anywhere. We need to find out what will be best strategy
- # when it failed HARD (there is a huge effort in process-upload
- # to not return error, it only happen when the code is broken).
- uploader_result_code = uploader_process.returncode
- logger.info("Uploader returned %d" % uploader_result_code)
-
- # Quick and dirty hack to carry on on process-upload failures
- if os.path.exists(upload_dir):
- logger.warning("The upload directory did not get moved.")
- failed_dir = os.path.join(root, "failed-to-move")
- if not os.path.exists(failed_dir):
- os.mkdir(failed_dir)
- os.rename(upload_dir, os.path.join(failed_dir, upload_leaf))
-
- # The famous 'flush_updates + clear_cache' will make visible
- # the DB changes done in process-upload, considering that the
- # transaction was set with ISOLATION_LEVEL_READ_COMMITED
- # isolation level.
- cur = cursor()
- cur.execute('SHOW transaction_isolation')
- isolation_str = cur.fetchone()[0]
- assert isolation_str == 'read committed', (
- 'BuildMaster/BuilderGroup transaction isolation should be '
- 'ISOLATION_LEVEL_READ_COMMITTED (not "%s")' % isolation_str)
-
- original_slave = self.buildqueue_record.builder.slave
-
- # XXX Robert Collins, Celso Providelo 2007-05-26 bug=506256:
- # 'Refreshing' objects procedure is forced on us by using a
- # different process to do the upload, but as that process runs
- # in the same unix account, it is simply double handling and we
- # would be better off to do it within this process.
- flush_database_updates()
- clear_current_connection_cache()
-
- # XXX cprov 2007-06-15: Re-issuing removeSecurityProxy is forced on
- # us by sqlobject refreshing the builder object during the
- # transaction cache clearing. Once we sort the previous problem
- # this step should probably not be required anymore.
- self.buildqueue_record.builder.setSlaveForTesting(
- removeSecurityProxy(original_slave))
+ logger.info(
+ "Gathered %s %d completely. Moving %s to uploader queue." % (
+ self.__class__.__name__, self.id, upload_leaf))
+ target_dir = os.path.join(root, "incoming")
+ self.status = BuildStatus.UPLOADING
+ else:
+ logger.warning(
+ "Copy from slave for build %s was unsuccessful.", self.id)
+ self.status = BuildStatus.FAILEDTOUPLOAD
+ self.notify(extra_info='Copy from slave was unsuccessful.')
+ target_dir = os.path.join(root, "failed")
+
+ if not os.path.exists(target_dir):
+ os.mkdir(target_dir)
+
+ # Move the directory used to grab the binaries into
+ # the incoming directory so the upload processor never
+ # sees half-finished uploads.
+ os.rename(grab_dir, os.path.join(target_dir, upload_leaf))
# Store build information, build record was already updated during
# the binary upload.
self.storeBuildInfo(self, librarian, slave_status)
- # Retrive the up-to-date build record and perform consistency
- # checks. The build record should be updated during the binary
- # upload processing, if it wasn't something is broken and needs
- # admins attention. Even when we have a FULLYBUILT build record,
- # if it is not related with at least one binary, there is also
- # a problem.
- # For both situations we will mark the builder as FAILEDTOUPLOAD
- # and the and update the build details (datebuilt, duration,
- # buildlog, builder) in LP. A build-failure-notification will be
- # sent to the lp-build-admin celebrity and to the sourcepackagerelease
- # uploader about this occurrence. The failure notification will
- # also contain the information required to manually reprocess the
- # binary upload when it was the case.
- if (self.status != BuildStatus.FULLYBUILT or
- not successful_copy_from_slave or
- not self.verifySuccessfulUpload()):
- logger.warning("Build %s upload failed." % self.id)
- self.status = BuildStatus.FAILEDTOUPLOAD
- uploader_log_content = self.getUploadLogContent(root,
- upload_leaf)
- # Store the upload_log_contents in librarian so it can be
- # accessed by anyone with permission to see the build.
- self.storeUploadLog(uploader_log_content)
- # Notify the build failure.
- self.notify(extra_info=uploader_log_content)
- else:
- logger.info(
- "Gathered %s %d completely" % (
- self.__class__.__name__, self.id))
-
# Release the builder for another job.
self.buildqueue_record.builder.cleanSlave()
+
# Remove BuildQueue record.
self.buildqueue_record.destroySelf()
@@ -583,6 +464,7 @@
unfinished_states = [
BuildStatus.NEEDSBUILD,
BuildStatus.BUILDING,
+ BuildStatus.UPLOADING,
BuildStatus.SUPERSEDED]
if status is None or status in unfinished_states:
result_set.order_by(
@@ -592,3 +474,8 @@
Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
return result_set
+
+ def getByID(self, build_id):
+ """See `IPackageBuildSet`."""
+ store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
+ return store.find(PackageBuild, PackageBuild.id == build_id).one()
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py 2010-08-30 13:58:15 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-03 14:49:06 +0000
@@ -20,6 +20,9 @@
LaunchpadFunctionalLayer,
LaunchpadZopelessLayer,
)
+from lp.archiveuploader.uploadprocessor import (
+ parse_build_upload_leaf_name,
+ )
from lp.buildmaster.enums import (
BuildFarmJobType,
BuildStatus,
@@ -32,7 +35,6 @@
from lp.buildmaster.model.packagebuild import PackageBuild
from lp.registry.interfaces.pocket import (
PackagePublishingPocket,
- pocketsuffix,
)
from lp.soyuz.tests.soyuzbuilddhelpers import WaitingSlave
from lp.testing import (
@@ -190,16 +192,6 @@
'%s-%s' % (now.strftime("%Y%m%d-%H%M%S"), build_cookie),
upload_leaf)
- def test_getUploadDir(self):
- # getUploadDir is the absolute path to the directory in which things
- # are uploaded to.
- build_cookie = self.factory.getUniqueInteger()
- upload_leaf = self.package_build.getUploadDirLeaf(build_cookie)
- upload_dir = self.package_build.getUploadDir(upload_leaf)
- self.assertEqual(
- os.path.join(config.builddmaster.root, 'incoming', upload_leaf),
- upload_dir)
-
class TestPackageBuildSet(TestPackageBuildBase):
@@ -241,6 +233,18 @@
self.package_build_set.getBuildsForArchive(
self.archive, pocket=PackagePublishingPocket.UPDATES))
+ def test_getBuildByID(self):
+ # getByID returns the build with the specified id, if it exists.
+ store = Store.of(self.package_builds[0])
+ store.flush()
+ build = self.package_build_set.getByID(self.package_builds[0].id)
+ self.assertEqual(build, self.package_builds[0])
+
+ def test_getBuildByID_nonexistant(self):
+ # getByID returns None if the build does not exist.
+ self.assertIs(None,
+ self.package_build_set.getByID(43243242))
+
class TestGetUploadMethodsMixin:
"""Tests for `IPackageBuild` that need objects from the rest of LP."""
@@ -281,23 +285,23 @@
self.assertEquals(
'foo', self.build.getUploadLogContent(os.getcwd(), "myleaf"))
- def test_getUploaderCommand(self):
- upload_leaf = self.factory.getUniqueString('upload-leaf')
- config_args = list(config.builddmaster.uploader.split())
- log_file = self.factory.getUniqueString('logfile')
- config_args.extend(
- ['--log-file', log_file,
- '-d', self.build.distribution.name,
- '-s', (self.build.distro_series.name
- + pocketsuffix[self.build.pocket]),
- '-b', str(self.build.id),
- '-J', upload_leaf,
- '--context=%s' % self.build.policy_name,
- os.path.abspath(config.builddmaster.root),
- ])
- uploader_command = self.build.getUploaderCommand(
- self.build, upload_leaf, log_file)
- self.assertEqual(config_args, uploader_command)
+ def test_getBuildCookie(self):
+ # A build cookie is made up of the package build id and record id.
+ # The uploadprocessor relies on this format.
+ cookie = self.build.getBuildCookie()
+ expected_cookie = "%d-%d" % (
+ self.build.id, self.build.buildqueue_record.id)
+ self.assertEquals(expected_cookie, cookie)
+
+ def test_getUploadDirLeafCookie_parseable(self):
+ # getUploadDirLeaf should return a directory name
+ # that is parseable by the upload processor.
+ upload_leaf = self.build.getUploadDirLeaf(
+ self.build.getBuildCookie())
+ (build_id, queue_record_id) = parse_build_upload_leaf_name(upload_leaf)
+ self.assertEqual(build_id, self.build.id)
+ self.assertEqual(
+ queue_record_id, self.build.buildqueue_record.id)
class TestHandleStatusMixin:
=== modified file 'lib/lp/code/browser/sourcepackagerecipebuild.py'
--- lib/lp/code/browser/sourcepackagerecipebuild.py 2010-08-27 11:19:54 +0000
+++ lib/lp/code/browser/sourcepackagerecipebuild.py 2010-09-03 14:49:06 +0000
@@ -82,6 +82,7 @@
return 'No suitable builders'
return {
BuildStatus.NEEDSBUILD: 'Pending build',
+ BuildStatus.UPLOADING: 'Build uploading',
BuildStatus.FULLYBUILT: 'Successful build',
BuildStatus.MANUALDEPWAIT: (
'Could not build because of missing dependencies'),
=== modified file 'lib/lp/soyuz/browser/archive.py'
--- lib/lp/soyuz/browser/archive.py 2010-08-31 11:31:04 +0000
+++ lib/lp/soyuz/browser/archive.py 2010-09-03 14:49:06 +0000
@@ -947,6 +947,7 @@
'NEEDSBUILD': 'Waiting to build',
'FAILEDTOBUILD': 'Failed to build:',
'BUILDING': 'Currently building',
+ 'UPLOADING': 'Currently uploading',
}
now = datetime.now(tz=pytz.UTC)
=== modified file 'lib/lp/soyuz/browser/publishing.py'
--- lib/lp/soyuz/browser/publishing.py 2010-08-31 11:11:09 +0000
+++ lib/lp/soyuz/browser/publishing.py 2010-09-03 14:49:06 +0000
@@ -227,6 +227,7 @@
"""Return the image path for the current build status summary."""
image_map = {
BuildSetStatus.BUILDING: '/@@/processing',
+ BuildSetStatus.UPLOADING: '/@@/processing',
BuildSetStatus.NEEDSBUILD: '/@@/build-needed',
BuildSetStatus.FAILEDTOBUILD: '/@@/no',
BuildSetStatus.FULLYBUILT_PENDING: '/@@/build-success-publishing'
=== modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt'
--- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-08-30 02:07:38 +0000
+++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-03 14:49:06 +0000
@@ -385,7 +385,8 @@
DEBUG ...
DEBUG Initialising connection.
...
- DEBUG Removing lock file: /var/lock/process-upload-buildd.lock
+ DEBUG: Moving upload directory /var/tmp/builddmaster/incoming/...
+ ... to /var/tmp/builddmaster/failed/...
...
When a failure in processing the generated binaries occurs, the log
@@ -613,29 +614,13 @@
>>> print headers['content-type']
text/plain
-Check the log from the uploader run has made it into the upload directory:
+Check the failed builds have made it into the failed directory:
>>> failed_dir = os.path.join(config.builddmaster.root, 'failed')
>>> failed_uploads = sorted(os.listdir(failed_dir))
>>> len(failed_uploads)
2
- >>> failed_upload = failed_uploads[0]
- >>> uploader_log = open(os.path.join(failed_dir, failed_upload,
- ... 'uploader.log'))
-
- >>> print uploader_log.read()
- DEBUG ...
- DEBUG Initialising connection.
- DEBUG Beginning processing
- DEBUG Creating directory /var/tmp/builddmaster/accepted
- DEBUG Creating directory /var/tmp/builddmaster/rejected
- DEBUG Creating directory /var/tmp/builddmaster/failed
- ...
- DEBUG Rolling back any remaining transactions.
- DEBUG Removing lock file: /var/lock/process-upload-buildd.lock
- <BLANKLINE>
-
Remove build upload results root
>>> shutil.rmtree(config.builddmaster.root)
=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py 2010-08-31 11:31:04 +0000
+++ lib/lp/soyuz/model/archive.py 2010-09-03 14:49:06 +0000
@@ -1004,10 +1004,9 @@
BuildStatus.FAILEDTOUPLOAD,
BuildStatus.MANUALDEPWAIT,
),
- # The 'pending' count is a list because we may append to it
- # later.
'pending': [
BuildStatus.BUILDING,
+ BuildStatus.UPLOADING,
],
'succeeded': (
BuildStatus.FULLYBUILT,
@@ -1023,6 +1022,7 @@
BuildStatus.FAILEDTOUPLOAD,
BuildStatus.MANUALDEPWAIT,
BuildStatus.BUILDING,
+ BuildStatus.UPLOADING,
BuildStatus.FULLYBUILT,
BuildStatus.SUPERSEDED,
],