[Merge] lp:~jelmer/launchpad/506256-remove-popen-2 into lp:launchpad/devel
Jelmer Vernooij has proposed merging lp:~jelmer/launchpad/506256-remove-popen-2 into lp:launchpad/devel with lp:~jelmer/launchpad/506256-remove-popen as a prerequisite.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers): code
--
https://code.launchpad.net/~jelmer/launchpad/506256-remove-popen-2/+merge/35412
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jelmer/launchpad/506256-remove-popen-2 into lp:launchpad/devel.
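
In short, this branch removes the old --buildid plumbing from the upload machinery. Rather than each policy fishing a build id out of options.buildid, the build is resolved from the upload directory leaf name and passed explicitly down the call chain: processChangesFile() -> NascentUpload.process() -> verify_acl()/do_accept() -> storeObjectsInDatabase(). A rough sketch of the resulting flow, using the names from the diff below (changes_path and logger are illustrative placeholders, not part of the branch):

    # Illustrative only: the calling convention after this branch lands.
    build = buildfarm_job.getSpecificJob()       # resolved from the leaf name
    policy = getPolicy(distribution, build)      # sets distroseries/pocket/archive
    upload = NascentUpload.from_changesfile_path(
        changes_path, policy, logger)            # placeholder path and logger
    upload.process(build=build)                  # ACL check uses build.getUploader()
    success = upload.do_accept(build=build)      # links the release to the build

Accordingly, process-upload.py loses its -b/--buildid option, the 'recipe' upload policy is folded into 'buildd', and recipe builds provide getUploader() so daily builds are ACL-checked against the requester rather than the changes file signer.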
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-10 02:46:28 +0000
+++ database/schema/security.cfg 2010-09-14 14:37:38 +0000
@@ -1130,9 +1130,12 @@
public.packagebuild = SELECT, INSERT, UPDATE
public.binarypackagebuild = SELECT, INSERT, UPDATE
public.sourcepackagerecipebuild = SELECT, UPDATE
-public.buildqueue = SELECT, INSERT, UPDATE
-public.job = SELECT, INSERT, UPDATE
-public.buildpackagejob = SELECT, INSERT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
+public.buildqueue = SELECT, INSERT, UPDATE, DELETE
+public.job = SELECT, INSERT, UPDATE, DELETE
+public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE
+public.builder = SELECT
# Thusly the librarian
public.libraryfilecontent = SELECT, INSERT
=== removed symlink 'lib/canonical/shipit'
=== target was u'../../sourcecode/shipit'
=== modified file 'lib/lp/archiveuploader/dscfile.py'
--- lib/lp/archiveuploader/dscfile.py 2010-09-09 17:02:33 +0000
+++ lib/lp/archiveuploader/dscfile.py 2010-09-14 14:37:38 +0000
@@ -630,35 +630,6 @@
cleanup_unpacked_dir(unpacked_dir)
self.logger.debug("Done")
- def findBuild(self):
- """Find and return the SourcePackageRecipeBuild, if one is specified.
-
- If by any chance an inconsistent build was found this method will
- raise UploadError resulting in a upload rejection.
- """
- build_id = getattr(self.policy.options, 'buildid', None)
- if build_id is None:
- return None
-
- build = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-
- # The master verifies the status to confirm successful upload.
- build.status = BuildStatus.FULLYBUILT
- # If this upload is successful, any existing log is wrong and
- # unuseful.
- build.upload_log = None
-
- # Sanity check; raise an error if the build we've been
- # told to link to makes no sense.
- if (build.pocket != self.policy.pocket or
- build.distroseries != self.policy.distroseries or
- build.archive != self.policy.archive):
- raise UploadError(
- "Attempt to upload source specifying "
- "recipe build %s, where it doesn't fit." % build.id)
-
- return build
-
def storeInDatabase(self, build):
"""Store DSC information as a SourcePackageRelease record.
=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-08-27 14:27:22 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-14 14:37:38 +0000
@@ -137,7 +137,7 @@
raise FatalUploadError(str(e))
return cls(changesfile, policy, logger)
- def process(self):
+ def process(self, build=None):
"""Process this upload, checking it against policy, loading it into
the database if it seems okay.
@@ -200,7 +200,7 @@
self.overrideArchive()
# Check upload rights for the signer of the upload.
- self.verify_acl()
+ self.verify_acl(build)
# Perform policy checks.
policy.checkUpload(self)
@@ -483,7 +483,7 @@
#
# Signature and ACL stuff
#
- def verify_acl(self):
+ def verify_acl(self, build=None):
"""Check the signer's upload rights.
The signer must have permission to upload to either the component
@@ -498,10 +498,13 @@
if self.binaryful:
return
- # Set up some convenient shortcut variables.
-
- uploader = self.policy.getUploader(self.changes)
- archive = self.policy.archive
+ # The build can have an explicit uploader, which may be different
+ # from the changes file signer (e.g. in the case of daily source
+ # package builds).
+ if build is not None:
+ uploader = build.getUploader(self.changes)
+ else:
+ uploader = self.changes.signer
# If we have no signer, there's no ACL we can apply.
if uploader is None:
@@ -511,7 +514,7 @@
source_name = getUtility(
ISourcePackageNameSet).queryByName(self.changes.dsc.package)
- rejection_reason = archive.checkUpload(
+ rejection_reason = self.policy.archive.checkUpload(
uploader, self.policy.distroseries, source_name,
self.changes.dsc.component, self.policy.pocket, not self.is_new)
@@ -824,7 +827,7 @@
#
# Actually processing accepted or rejected uploads -- and mailing people
#
- def do_accept(self, notify=True):
+ def do_accept(self, notify=True, build=None):
"""Accept the upload into the queue.
This *MAY* in extreme cases cause a database error and thus
@@ -834,13 +837,14 @@
constraint.
:param notify: True to send an email, False to not send one.
+ :param build: The build associated with this upload.
"""
if self.is_rejected:
self.reject("Alas, someone called do_accept when we're rejected")
self.do_reject(notify)
return False
try:
- self.storeObjectsInDatabase()
+ self.storeObjectsInDatabase(build=build)
# Send the email.
# There is also a small corner case here where the DB transaction
@@ -923,7 +927,7 @@
#
# Inserting stuff in the database
#
- def storeObjectsInDatabase(self):
+ def storeObjectsInDatabase(self, build=None):
"""Insert this nascent upload into the database."""
# Queue entries are created in the NEW state by default; at the
@@ -939,7 +943,8 @@
sourcepackagerelease = None
if self.sourceful:
assert self.changes.dsc, "Sourceful upload lacks DSC."
- build = self.changes.dsc.findBuild()
+ if build is not None:
+ self.changes.dsc.checkBuild(build)
sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
package_upload_source = self.queue_root.addSource(
sourcepackagerelease)
@@ -966,6 +971,7 @@
bpfs_to_create = sorted(
self.changes.binary_package_files,
key=lambda file: file.ddeb_file is not None)
+ assert build is None or len(bpfs_to_create) == 1
for binary_package_file in bpfs_to_create:
if self.sourceful:
# The reason we need to do this verification
@@ -980,11 +986,19 @@
sourcepackagerelease = (
binary_package_file.findSourcePackageRelease())
- build = binary_package_file.findBuild(sourcepackagerelease)
+ if build is None:
+ build = binary_package_file.findBuild(
+ sourcepackagerelease)
+ if build.source_package_release != sourcepackagerelease:
+ raise AssertionError(
+ "Attempt to upload binaries specifying build %s, "
+ "where they don't fit." % build.id)
+ binary_package_file.checkBuild(build)
assert self.queue_root.pocket == build.pocket, (
"Binary was not build for the claimed pocket.")
binary_package_file.storeInDatabase(build)
processed_builds.append(build)
+ build = None
# Store the related builds after verifying they were built
# from the same source.
=== modified file 'lib/lp/archiveuploader/nascentuploadfile.py'
--- lib/lp/archiveuploader/nascentuploadfile.py 2010-09-02 16:28:50 +0000
+++ lib/lp/archiveuploader/nascentuploadfile.py 2010-09-14 14:37:38 +0000
@@ -52,7 +52,6 @@
PackageUploadCustomFormat,
PackageUploadStatus,
)
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
from lp.soyuz.interfaces.component import IComponentSet
from lp.soyuz.interfaces.section import ISectionSet
@@ -338,6 +337,13 @@
"""Return an ISection for self.section_name."""
return getUtility(ISectionSet)[self.section_name]
+ def checkBuild(self, build):
+ """Check the status of the build this file is part of.
+
+ :param build: an `IPackageBuild` instance
+ """
+ raise NotImplementedError(self.checkBuild)
+
def extractUserDefinedFields(self, control):
"""Extract the user defined fields out of a control file list.
"""
@@ -381,6 +387,23 @@
yield UploadError("%s: should be %s according to changes file."
% (filename_version, version_chopped))
+ def checkBuild(self, build):
+ """See PackageUploadFile."""
+ # The master verifies the status to confirm successful upload.
+ build.status = BuildStatus.FULLYBUILT
+ # If this upload is successful, any existing log is wrong and
+ # useless.
+ build.upload_log = None
+
+ # Sanity check; raise an error if the build we've been
+ # told to link to makes no sense.
+ if (build.pocket != self.policy.pocket or
+ build.distroseries != self.policy.distroseries or
+ build.archive != self.policy.archive):
+ raise UploadError(
+ "Attempt to upload source specifying "
+ "recipe build %s, where it doesn't fit." % build.id)
+
class BaseBinaryUploadFile(PackageUploadFile):
"""Base methods for binary upload modeling."""
@@ -837,49 +860,45 @@
If by any chance an inconsistent build was found this method will
raise UploadError resulting in a upload rejection.
"""
- build_id = getattr(self.policy.options, 'buildid', None)
dar = self.policy.distroseries[self.archtag]
- if build_id is None:
- # Check if there's a suitable existing build.
- build = sourcepackagerelease.getBuildByArch(
- dar, self.policy.archive)
- if build is not None:
- build.status = BuildStatus.FULLYBUILT
- self.logger.debug("Updating build for %s: %s" % (
- dar.architecturetag, build.id))
- else:
- # No luck. Make one.
- # Usually happen for security binary uploads.
- build = sourcepackagerelease.createBuild(
- dar, self.policy.pocket, self.policy.archive,
- status=BuildStatus.FULLYBUILT)
- self.logger.debug("Build %s created" % build.id)
- else:
- build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id)
- self.logger.debug("Build %s found" % build.id)
- # Ensure gathered binary is related to a FULLYBUILT build
- # record. It will be check in slave-scanner procedure to
- # certify that the build was processed correctly.
+ # Check if there's a suitable existing build.
+ build = sourcepackagerelease.getBuildByArch(
+ dar, self.policy.archive)
+ if build is not None:
build.status = BuildStatus.FULLYBUILT
- # Also purge any previous failed upload_log stored, so its
- # content can be garbage-collected since it's not useful
- # anymore.
- build.upload_log = None
+ self.logger.debug("Updating build for %s: %s" % (
+ dar.architecturetag, build.id))
+ else:
+ # No luck. Make one.
+ # This usually happens for security binary uploads.
+ build = sourcepackagerelease.createBuild(
+ dar, self.policy.pocket, self.policy.archive,
+ status=BuildStatus.FULLYBUILT)
+ self.logger.debug("Build %s created" % build.id)
+ return build
+
+ def checkBuild(self, build):
+ """See PackageUploadFile."""
+ dar = self.policy.distroseries[self.archtag]
+ # Ensure the gathered binary is related to a FULLYBUILT build
+ # record. It will be checked in the slave-scanner procedure to
+ # certify that the build was processed correctly.
+ build.status = BuildStatus.FULLYBUILT
+ # Also purge any previous failed upload_log stored, so its
+ # content can be garbage-collected since it's not useful
+ # anymore.
+ build.upload_log = None
# Sanity check; raise an error if the build we've been
- # told to link to makes no sense (ie. is not for the right
- # source package).
- if (build.source_package_release != sourcepackagerelease or
- build.pocket != self.policy.pocket or
+ # told to link to makes no sense.
+ if (build.pocket != self.policy.pocket or
build.distro_arch_series != dar or
build.archive != self.policy.archive):
raise UploadError(
"Attempt to upload binaries specifying "
"build %s, where they don't fit." % build.id)
- return build
-
def storeInDatabase(self, build):
"""Insert this binary release and build into the database."""
# Reencode everything we are supplying, because old packages
=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
--- lib/lp/archiveuploader/tests/__init__.py 2010-08-26 20:08:43 +0000
+++ lib/lp/archiveuploader/tests/__init__.py 2010-09-14 14:37:38 +0000
@@ -64,17 +64,15 @@
class MockUploadOptions:
"""Mock upload policy options helper"""
- def __init__(self, distro='ubuntutest', distroseries=None, buildid=None):
+ def __init__(self, distro='ubuntutest', distroseries=None):
self.distro = distro
self.distroseries = distroseries
- self.buildid = buildid
-
-
-def getPolicy(name='anything', distro='ubuntu', distroseries=None,
- buildid=None):
+
+
+def getPolicy(name='anything', distro='ubuntu', distroseries=None):
"""Build and return an Upload Policy for the given context."""
policy = findPolicyByName(name)
- options = MockUploadOptions(distro, distroseries, buildid)
+ options = MockUploadOptions(distro, distroseries)
policy.setOptions(options)
return policy
=== modified file 'lib/lp/archiveuploader/tests/nascentupload.txt'
--- lib/lp/archiveuploader/tests/nascentupload.txt 2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/nascentupload.txt 2010-09-14 14:37:38 +0000
@@ -27,7 +27,7 @@
... datadir, getPolicy, mock_logger, mock_logger_quiet)
>>> buildd_policy = getPolicy(
- ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+ ... name='buildd', distro='ubuntu', distroseries='hoary')
>>> sync_policy = getPolicy(
... name='sync', distro='ubuntu', distroseries='hoary')
@@ -216,7 +216,7 @@
# Use the buildd policy as it accepts unsigned changes files and binary
# uploads.
>>> modified_buildd_policy = getPolicy(
- ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+ ... name='buildd', distro='ubuntu', distroseries='hoary')
>>> ed_mismatched_upload = NascentUpload.from_changesfile_path(
... datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"),
@@ -640,13 +640,12 @@
the 'buildd' upload policy and the build record id.
>>> buildd_policy = getPolicy(
- ... name='buildd', distro='ubuntu', distroseries='hoary',
- ... buildid=multibar_build.id)
+ ... name='buildd', distro='ubuntu', distroseries='hoary')
>>> multibar_bin_upload = NascentUpload.from_changesfile_path(
... datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'),
... buildd_policy, mock_logger_quiet)
- >>> multibar_bin_upload.process()
+ >>> multibar_bin_upload.process(build=multibar_build)
>>> success = multibar_bin_upload.do_accept()
Now that we have successfully processed the binaries coming from a
=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py 2010-09-14 14:37:38 +0000
@@ -112,7 +112,7 @@
# Store source queue item for future use.
self.source_queue = queue_item
- def _uploadBinary(self, archtag):
+ def _uploadBinary(self, archtag, build):
"""Upload the base binary.
Ensure it got processed and has a respective queue record.
@@ -121,7 +121,7 @@
self._prepareUpload(self.binary_dir)
self.uploadprocessor.processChangesFile(
os.path.join(self.queue_folder, "incoming", self.binary_dir),
- self.getBinaryChangesfileFor(archtag))
+ self.getBinaryChangesfileFor(archtag), build=build)
queue_item = self.uploadprocessor.last_processed_upload.queue_root
self.assertTrue(
queue_item is not None,
@@ -205,10 +205,9 @@
pubrec.datepublished = UTC_NOW
queue_item.setDone()
- def _setupUploadProcessorForBuild(self, build_candidate):
+ def _setupUploadProcessorForBuild(self):
"""Setup an UploadProcessor instance for a given buildd context."""
self.options.context = self.policy
- self.options.buildid = str(build_candidate.id)
self.uploadprocessor = self.getUploadProcessor(
self.layer.txn)
@@ -223,8 +222,8 @@
"""
# Upload i386 binary.
build_candidate = self._createBuild('i386')
- self._setupUploadProcessorForBuild(build_candidate)
- build_used = self._uploadBinary('i386')
+ self._setupUploadProcessorForBuild()
+ build_used = self._uploadBinary('i386', build_candidate)
self.assertEqual(build_used.id, build_candidate.id)
self.assertBuildsCreated(1)
@@ -239,8 +238,8 @@
# Upload powerpc binary
build_candidate = self._createBuild('powerpc')
- self._setupUploadProcessorForBuild(build_candidate)
- build_used = self._uploadBinary('powerpc')
+ self._setupUploadProcessorForBuild()
+ build_used = self._uploadBinary('powerpc', build_candidate)
self.assertEqual(build_used.id, build_candidate.id)
self.assertBuildsCreated(2)
=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-31 11:11:09 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-09-14 14:37:38 +0000
@@ -355,10 +355,10 @@
builds = self.name16.archive.getBuildRecords(name="bar")
[build] = builds
self.options.context = 'buildd'
- self.options.buildid = build.id
upload_dir = self.queueUpload(
"bar_1.0-1_binary_universe", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(
+ self.uploadprocessor, upload_dir, build=build)
# No mails are sent for successful binary uploads.
self.assertEqual(len(stub.test_emails), 0,
@@ -405,9 +405,9 @@
# Binary upload to the just-created build record.
self.options.context = 'buildd'
- self.options.buildid = build.id
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(
+ self.uploadprocessor, upload_dir, build=build)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -459,9 +459,9 @@
# Binary upload to the just-created build record.
self.options.context = 'buildd'
- self.options.buildid = build_bar_i386.id
upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(
+ self.uploadprocessor, upload_dir, build=build_bar_i386)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -760,9 +760,9 @@
builds = self.name16.archive.getBuildRecords(name='bar')
[build] = builds
self.options.context = 'buildd'
- self.options.buildid = build.id
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(
+ self.uploadprocessor, upload_dir, build=build)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -804,10 +804,9 @@
# Binary uploads should exhibit the same behaviour:
[build] = self.name16.archive.getBuildRecords(name="bar")
self.options.context = 'buildd'
- self.options.buildid = build.id
upload_dir = self.queueUpload(
"bar_1.0-1_contrib_binary", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(self.uploadprocessor, upload_dir, build=build)
queue_items = self.breezy.getQueueItems(
status=PackageUploadStatus.ACCEPTED, name="bar",
version="1.0-1", exact_match=True, archive=self.name16.archive)
@@ -1306,14 +1305,14 @@
builds = self.name16.archive.getBuildRecords(name='bar')
[build] = builds
self.options.context = 'buildd'
- self.options.buildid = build.id
# Stuff 1024 MiB in name16 PPA, so anything will be above the
# default quota limit, 1024 MiB.
self._fillArchive(self.name16.archive, 1024 * (2 ** 20))
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir)
+ self.processUpload(
+ self.uploadprocessor, upload_dir, build=build)
# The binary upload was accepted, and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-08-27 11:19:54 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-14 14:37:38 +0000
@@ -10,6 +10,9 @@
from storm.store import Store
from zope.component import getUtility
+from lp.archiveuploader.uploadprocessor import (
+ UploadStatusEnum,
+ )
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase,
)
@@ -17,7 +20,6 @@
from lp.code.interfaces.sourcepackagerecipebuild import (
ISourcePackageRecipeBuildSource,
)
-from lp.soyuz.enums import PackageUploadStatus
class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
@@ -40,8 +42,7 @@
requester=self.recipe.owner)
Store.of(self.build).flush()
- self.options.context = 'recipe'
- self.options.buildid = self.build.id
+ self.options.context = 'buildd'
self.uploadprocessor = self.getUploadProcessor(
self.layer.txn)
@@ -54,19 +55,14 @@
self.assertIs(None, self.build.source_package_release)
self.assertEqual(False, self.build.verifySuccessfulUpload())
self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
- self.uploadprocessor.processChangesFile(
+ result = self.uploadprocessor.processChangesFile(
os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
- '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
+ '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
+ build=self.build)
self.layer.txn.commit()
- queue_item = self.uploadprocessor.last_processed_upload.queue_root
- self.assertTrue(
- queue_item is not None,
+ self.assertEquals(UploadStatusEnum.ACCEPTED, result,
"Source upload failed\nGot: %s" % "\n".join(self.log.lines))
- self.assertEqual(PackageUploadStatus.DONE, queue_item.status)
- spr = queue_item.sources[0].sourcepackagerelease
- self.assertEqual(self.build, spr.source_package_recipe_build)
- self.assertEqual(spr, self.build.source_package_release)
self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
self.assertEqual(True, self.build.verifySuccessfulUpload())
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-14 14:37:27 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-14 14:37:38 +0000
@@ -153,7 +153,7 @@
self.options = MockOptions()
self.options.base_fsroot = self.queue_folder
- self.options.builds = True
+ self.options.builds = False
self.options.leafname = None
self.options.distro = "ubuntu"
self.options.distroseries = None
@@ -172,9 +172,13 @@
super(TestUploadProcessorBase, self).tearDown()
def getUploadProcessor(self, txn):
- def getPolicy(distro):
+ def getPolicy(distro, build):
self.options.distro = distro.name
policy = findPolicyByName(self.options.context)
+ if self.options.builds:
+ policy.distroseries = build.distro_series
+ policy.pocket = build.pocket
+ policy.archive = build.archive
policy.setOptions(self.options)
return policy
return UploadProcessor(
@@ -288,7 +292,7 @@
shutil.copytree(upload_dir, target_path)
return os.path.join(self.incoming_folder, queue_entry)
- def processUpload(self, processor, upload_dir):
+ def processUpload(self, processor, upload_dir, build=None):
"""Process an upload queue entry directory.
There is some duplication here with logic in UploadProcessor,
@@ -298,7 +302,8 @@
results = []
changes_files = processor.locateChangesFiles(upload_dir)
for changes_file in changes_files:
- result = processor.processChangesFile(upload_dir, changes_file)
+ result = processor.processChangesFile(
+ upload_dir, changes_file, build=build)
results.append(result)
return results
@@ -693,10 +698,10 @@
# Upload and accept a binary for the primary archive source.
shutil.rmtree(upload_dir)
self.options.context = 'buildd'
- self.options.buildid = bar_original_build.id
self.layer.txn.commit()
upload_dir = self.queueUpload("bar_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(uploadprocessor, upload_dir,
+ build=bar_original_build)
self.assertEqual(
uploadprocessor.last_processed_upload.is_rejected, False)
bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
@@ -724,10 +729,10 @@
shutil.rmtree(upload_dir)
self.options.context = 'buildd'
- self.options.buildid = bar_copied_build.id
upload_dir = self.queueUpload(
"bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(uploadprocessor, upload_dir,
+ build=bar_copied_build)
# Make sure the upload succeeded.
self.assertEqual(
@@ -796,9 +801,9 @@
[bar_original_build] = bar_source_pub.createMissingBuilds()
self.options.context = 'buildd'
- self.options.buildid = bar_original_build.id
upload_dir = self.queueUpload("bar_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(
+ uploadprocessor, upload_dir, build=bar_original_build)
[bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
# Prepare ubuntu/breezy-autotest to build sources in i386.
@@ -818,10 +823,10 @@
# Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
# in breezy-autotest context.
shutil.rmtree(upload_dir)
- self.options.buildid = bar_copied_build.id
self.options.distroseries = breezy_autotest.name
upload_dir = self.queueUpload("bar_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(uploadprocessor, upload_dir,
+ build=bar_copied_build)
[duplicated_binary_upload] = breezy_autotest.getQueueItems(
status=PackageUploadStatus.NEW, name='bar',
version='1.0-1', exact_match=True)
@@ -859,9 +864,9 @@
[bar_original_build] = bar_source_pub.getBuilds()
self.options.context = 'buildd'
- self.options.buildid = bar_original_build.id
upload_dir = self.queueUpload("bar_1.0-2_binary")
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(uploadprocessor, upload_dir,
+ build=bar_original_build)
[bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
# Create a COPY archive for building in non-virtual builds.
@@ -878,10 +883,10 @@
[bar_copied_build] = bar_copied_source.createMissingBuilds()
shutil.rmtree(upload_dir)
- self.options.buildid = bar_copied_build.id
upload_dir = self.queueUpload(
"bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(uploadprocessor, upload_dir,
+ build=bar_copied_build)
# The binary just uploaded is accepted because it's destined for a
# copy archive and the PRIMARY and the COPY archives are isolated
@@ -1034,9 +1039,9 @@
self.breezy['i386'], PackagePublishingPocket.RELEASE,
self.ubuntu.main_archive)
self.layer.txn.commit()
- self.options.buildid = foocomm_build.id
upload_dir = self.queueUpload("foocomm_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir)
+ self.processUpload(
+ uploadprocessor, upload_dir, build=foocomm_build)
contents = [
"Subject: foocomm_1.0-1_i386.changes rejected",
@@ -1044,10 +1049,8 @@
"where they don't fit."]
self.assertEmail(contents)
- # Reset upload queue directory for a new upload and the
- # uploadprocessor buildid option.
+ # Reset upload queue directory for a new upload.
shutil.rmtree(upload_dir)
- self.options.buildid = None
# Now upload a binary package of 'foocomm', letting a new build record
# with appropriate data be created by the uploadprocessor.
@@ -1881,7 +1884,7 @@
self.assertLogContains(
"Unable to find package build job with id 42. Skipping.")
- def testNoFiles(self):
+ def testBinaryPackageBuild_fail(self):
# If the upload directory is empty, the upload
# will fail.
@@ -1905,6 +1908,8 @@
# Upload and accept a binary for the primary archive source.
shutil.rmtree(upload_dir)
+
+ # Commit so the build cookie has the right ids.
self.layer.txn.commit()
leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1925,7 +1930,7 @@
self.assertTrue('DEBUG: Moving upload directory '
in log_contents)
- def testSuccess(self):
+ def testBinaryPackageBuilds(self):
# Properly uploaded binaries should result in the
# build status changing to FULLYBUILT.
# Upload a source package
@@ -1946,6 +1951,8 @@
# Upload and accept a binary for the primary archive source.
shutil.rmtree(upload_dir)
+
+ # Commit so the build cookie has the right ids.
self.layer.txn.commit()
leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
upload_dir = self.queueUpload("bar_1.0-1_binary",
@@ -1967,6 +1974,72 @@
'INFO: Committing the transaction and any mails associated with '
'this upload.' in log_lines)
+ def testSourcePackageRecipeBuild(self):
+ # Properly uploaded source packages should result in the
+ # build status changing to FULLYBUILT.
+
+ # Upload a source package
+ build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+ distroseries=self.breezy)
+ removeSecurityProxy(build).package_build.archive = self.ubuntu.main_archive
+ bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+ # Commit so the build cookie has the right ids.
+ self.layer.txn.commit()
+ leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+ upload_dir = self.queueUpload(
+ "bar_1.0-1", queue_entry=leaf_name)
+ self.options.context = 'buildd'
+ self.options.builds = True
+ build.jobStarted()
+ # Commit so date_started is recorded and doesn't cause constraint
+ # violations later.
+ self.layer.txn.commit()
+ build.status = BuildStatus.UPLOADING
+ self.uploadprocessor.processBuildUpload(
+ self.incoming_folder, leaf_name)
+ self.layer.txn.commit()
+
+ self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+ self.assertEquals(None, build.builder)
+ self.assertIsNot(None, build.date_finished)
+ self.assertIsNot(None, build.duration)
+ log_contents = build.upload_log.read()
+ log_lines = log_contents.splitlines()
+ self.assertTrue(
+ 'INFO: Processing upload bar_1.0-1_source.changes' in log_lines)
+ self.assertTrue(
+ 'INFO: Committing the transaction and any mails associated with '
+ 'this upload.' in log_lines)
+
+ def testSourcePackageRecipeBuild_fail(self):
+ # A source package recipe build will fail if no files are present.
+
+ # Upload a source package
+ build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+ distroseries=self.breezy)
+ removeSecurityProxy(build).package_build.archive = self.ubuntu.main_archive
+ bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+ # Commit so the build cookie has the right ids.
+ self.layer.txn.commit()
+ leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+ os.mkdir(os.path.join(self.incoming_folder, leaf_name))
+ self.options.context = 'buildd'
+ self.options.builds = True
+ build.jobStarted()
+ # Commit so date_started is recorded and doesn't cause constraint
+ # violations later.
+ self.layer.txn.commit()
+ build.status = BuildStatus.UPLOADING
+ self.uploadprocessor.processBuildUpload(
+ self.incoming_folder, leaf_name)
+ self.layer.txn.commit()
+
+ self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
+ self.assertEquals(None, build.builder)
+ self.assertIsNot(None, build.date_finished)
+ self.assertIsNot(None, build.duration)
+
class ParseBuildUploadLeafNameTests(TestCase):
"""Tests for parse_build_upload_leaf_name."""
=== modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt'
--- lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-08-18 14:03:15 +0000
+++ lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-09-14 14:37:38 +0000
@@ -53,23 +53,16 @@
... distro = 'ubuntu'
... distroseries = None
>>> class MockOptions(MockAbstractOptions):
- ... buildid = 1
+ ... builds = True
>>> ab_opts = MockAbstractOptions()
>>> bd_opts = MockOptions()
>>> insecure_policy.setOptions(ab_opts)
- >>> insecure_policy.options is ab_opts
- True
>>> insecure_policy.distro.name
u'ubuntu'
>>> buildd_policy.setOptions(ab_opts)
- Traceback (most recent call last):
- ...
- UploadPolicyError: BuildID required for buildd context
>>> buildd_policy.setOptions(bd_opts)
- >>> buildd_policy.options is bd_opts
- True
>>> buildd_policy.distro.name
u'ubuntu'
=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
--- lib/lp/archiveuploader/uploadpolicy.py 2010-08-25 13:04:14 +0000
+++ lib/lp/archiveuploader/uploadpolicy.py 2010-09-14 14:37:38 +0000
@@ -11,7 +11,6 @@
"BuildDaemonUploadPolicy",
"findPolicyByName",
"IArchiveUploadPolicy",
- "SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME",
"UploadPolicyError",
]
@@ -34,8 +33,6 @@
from lazr.enum import EnumeratedType, Item
-# Defined here so that uploadpolicy.py doesn't depend on lp.code.
-SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME = 'recipe'
# Number of seconds in an hour (used later)
HOURS = 3600
@@ -128,13 +125,8 @@
raise AssertionError(
"Upload is not sourceful, binaryful or mixed.")
- def getUploader(self, changes):
- """Get the person who is doing the uploading."""
- return changes.signer
-
def setOptions(self, options):
"""Store the options for later."""
- self.options = options
# Extract and locate the distribution though...
self.distro = getUtility(IDistributionSet)[options.distro]
if options.distroseries is not None:
@@ -324,7 +316,6 @@
"""The build daemon upload policy is invoked by the slave scanner."""
name = 'buildd'
- accepted_type = ArchiveUploadType.BINARY_ONLY
def __init__(self):
super(BuildDaemonUploadPolicy, self).__init__()
@@ -333,22 +324,28 @@
self.unsigned_dsc_ok = True
def setOptions(self, options):
- AbstractUploadPolicy.setOptions(self, options)
- # We require a buildid to be provided
- if (getattr(options, 'buildid', None) is None and
- not getattr(options, 'builds', False)):
- raise UploadPolicyError("BuildID required for buildd context")
+ """Store the options for later."""
+ super(BuildDaemonUploadPolicy, self).setOptions(options)
+ options.builds = True
def policySpecificChecks(self, upload):
"""The buildd policy should enforce that the buildid matches."""
# XXX: dsilvers 2005-10-14 bug=3135:
# Implement this to check the buildid etc.
- pass
def rejectPPAUploads(self, upload):
"""Buildd policy allows PPA upload."""
return False
+ def validateUploadType(self, upload):
+ if upload.sourceful and upload.binaryful:
+ if self.accepted_type != ArchiveUploadType.MIXED_ONLY:
+ upload.reject(
+ "Source/binary (i.e. mixed) uploads are not allowed.")
+ elif not upload.sourceful and not upload.binaryful:
+ raise AssertionError(
+ "Upload is not sourceful, binaryful or mixed.")
+
class SyncUploadPolicy(AbstractUploadPolicy):
"""This policy is invoked when processing sync uploads."""
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2010-09-14 14:37:27 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-14 14:37:38 +0000
@@ -38,7 +38,7 @@
Each upload directory is saved after processing, in case it is needed for
debugging purposes. This is done by moving it to a directory inside the queue
directory, beside incoming, named after the result - 'failed', 'rejected' or
-'accepted'. Where there are no changes files, the upload is considered failed,
+
and where there is more than one changes file, the upload is assigned the
worst of the results from the various changes files found (in the order
above, failed being worst).
@@ -71,7 +71,6 @@
)
from lp.archiveuploader.uploadpolicy import (
BuildDaemonUploadPolicy,
- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
UploadPolicyError,
)
from lp.buildmaster.enums import (
@@ -207,6 +206,7 @@
The name of the leaf is the build id of the build.
Build uploads always contain a single package per leaf.
"""
+ upload_path = os.path.join(fsroot, upload)
try:
job_id = parse_build_upload_leaf_name(upload)
except ValueError:
@@ -220,20 +220,20 @@
"Unable to find package build job with id %d. Skipping." %
job_id)
return
+ logger = BufferLogger()
build = buildfarm_job.getSpecificJob()
if build.status != BuildStatus.UPLOADING:
self.log.warn(
- "Expected build status to be 'UPLOADING', was %s. Skipping.",
- build.status.name)
+ "Expected build status to be 'UPLOADING', was %s. "
+ "Moving to failed.", build.status.name)
+ self.moveProcessedUpload(upload_path, "failed", logger)
return
self.log.debug("Build %s found" % build.id)
- logger = BufferLogger()
- upload_path = os.path.join(fsroot, upload)
try:
[changes_file] = self.locateChangesFiles(upload_path)
logger.debug("Considering changefile %s" % changes_file)
result = self.processChangesFile(
- upload_path, changes_file, logger)
+ upload_path, changes_file, logger, build=build)
except (KeyboardInterrupt, SystemExit):
raise
except:
@@ -251,17 +251,14 @@
UploadStatusEnum.REJECTED: "rejected",
UploadStatusEnum.ACCEPTED: "accepted"}[result]
self.moveProcessedUpload(upload_path, destination, logger)
+ build.date_finished = datetime.datetime.now(pytz.UTC)
if not (result == UploadStatusEnum.ACCEPTED and
build.verifySuccessfulUpload() and
build.status == BuildStatus.FULLYBUILT):
build.status = BuildStatus.FAILEDTOUPLOAD
- build.date_finished = datetime.datetime.now(pytz.UTC)
build.notify(extra_info="Uploading build %s failed." % upload)
build.storeUploadLog(logger.buffer.getvalue())
- # Remove BuildQueue record.
- build.buildqueue_record.destroySelf()
-
def processUpload(self, fsroot, upload):
"""Process an upload's changes files, and move it to a new directory.
@@ -376,7 +373,8 @@
os.path.join(relative_path, filename))
return self.orderFilenames(changes_files)
- def processChangesFile(self, upload_path, changes_file, logger=None):
+ def processChangesFile(self, upload_path, changes_file, logger=None,
+ build=None):
"""Process a single changes file.
This is done by obtaining the appropriate upload policy (according
@@ -432,7 +430,7 @@
"https://help.launchpad.net/Packaging/PPA#Uploading "
"and update your configuration.")))
logger.debug("Finding fresh policy")
- policy = self._getPolicyForDistro(distribution)
+ policy = self._getPolicyForDistro(distribution, build)
policy.archive = archive
# DistroSeries overriding respect the following precedence:
@@ -450,10 +448,8 @@
# Reject source upload to buildd upload paths.
first_path = relative_path.split(os.path.sep)[0]
- is_not_buildd_nor_recipe_policy = policy.name not in [
- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
- BuildDaemonUploadPolicy.name]
- if first_path.isdigit() and is_not_buildd_nor_recipe_policy:
+ if (first_path.isdigit() and
+ policy.name != BuildDaemonUploadPolicy.name):
error_message = (
"Invalid upload path (%s) for this policy (%s)" %
(relative_path, policy.name))
@@ -472,7 +468,7 @@
result = UploadStatusEnum.ACCEPTED
try:
- upload.process()
+ upload.process(build)
except UploadPolicyError, e:
upload.reject("UploadPolicyError escaped upload.process: "
"%s " % e)
@@ -513,7 +509,8 @@
upload.do_reject(notify)
self.ztm.abort()
else:
- successful = upload.do_accept(notify=notify)
+ successful = upload.do_accept(
+ notify=notify, build=build)
if not successful:
result = UploadStatusEnum.REJECTED
logger.info(
=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
--- lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-14 14:37:27 +0000
+++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-14 14:37:38 +0000
@@ -71,10 +71,6 @@
title=_('Build farm job'), schema=IBuildFarmJob, required=True,
readonly=True, description=_('The base build farm job.'))
- policy_name = TextLine(
- title=_("Policy name"), required=True,
- description=_("The upload policy to use for handling these builds."))
-
current_component = Attribute(
'Component where the source related to this build was last '
'published.')
@@ -149,6 +145,14 @@
created in a suspended state.
"""
+ def getUploader(changes):
+ """Return the person responsible for the upload.
+
+ This is used when checking permissions.
+
+ :param changes: Changes file from the upload.
+ """
+
class IPackageBuildSource(Interface):
"""A utility of this interface used to create _things_."""
=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
--- lib/lp/buildmaster/model/packagebuild.py 2010-09-14 14:37:27 +0000
+++ lib/lp/buildmaster/model/packagebuild.py 2010-09-14 14:37:38 +0000
@@ -94,8 +94,6 @@
build_farm_job_id = Int(name='build_farm_job', allow_none=False)
build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
- policy_name = 'buildd'
-
# The following two properties are part of the IPackageBuild
# interface, but need to be provided by derived classes.
distribution = None
@@ -239,6 +237,10 @@
"""See `IPackageBuild`."""
raise NotImplementedError
+ def getUploader(self, changes):
+ """See `IPackageBuild`."""
+ return changes.signer
+
class PackageBuildDerived:
"""Setup the delegation for package build.
@@ -360,6 +362,9 @@
# Release the builder for another job.
self.buildqueue_record.builder.cleanSlave()
+ # Remove BuildQueue record.
+ self.buildqueue_record.destroySelf()
+
def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger):
"""Handle a package that had failed to build.
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-14 14:37:27 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-14 14:37:38 +0000
@@ -105,7 +105,6 @@
def test_default_values(self):
# PackageBuild has a number of default values.
- self.failUnlessEqual('buildd', self.package_build.policy_name)
self.failUnlessEqual(
'multiverse', self.package_build.current_component.name)
self.failUnlessEqual(None, self.package_build.distribution)
@@ -194,6 +193,15 @@
'%s-%s' % (now.strftime("%Y%m%d-%H%M%S"), build_cookie),
upload_leaf)
+ def test_getBuildCookie(self):
+ # A build cookie is made up of the package build id and record id.
+ # The uploadprocessor relies on this format.
+ Store.of(self.package_build).flush()
+ cookie = self.package_build.getBuildCookie()
+ expected_cookie = "%d-PACKAGEBUILD-%d" % (
+ self.package_build.id, self.package_build.build_farm_job.id)
+ self.assertEquals(expected_cookie, cookie)
+
class TestPackageBuildSet(TestPackageBuildBase):
@@ -249,14 +257,6 @@
super(TestGetUploadMethodsMixin, self).setUp()
self.build = self.makeBuild()
- def test_getBuildCookie(self):
- # A build cookie is made up of the package build id and record id.
- # The uploadprocessor relies on this format.
- cookie = self.build.getBuildCookie()
- expected_cookie = "%d-PACKAGEBUILD-%d" % (
- self.build.package_build.id, self.build.build_farm_job.id)
- self.assertEquals(expected_cookie, cookie)
-
def test_getUploadDirLeafCookie_parseable(self):
# getUploadDirLeaf should return a directory name
# that is parseable by the upload processor.
@@ -321,11 +321,14 @@
def test_handleStatus_OK_absolute_filepath(self):
# A filemap that tries to write to files outside of
# the upload directory will result in a failed upload.
+ builder = self.build.builder
self.build.handleStatus('OK', None, {
'filemap': {'/tmp/myfile.py': 'test_file_hash'},
})
self.assertEqual(BuildStatus.FAILEDTOUPLOAD, self.build.status)
self.assertResultCount(0, "failed")
+ self.assertIs(None, self.build.buildqueue_record)
+ self.assertIs(None, builder.currentjob)
def test_handleStatus_OK_relative_filepath(self):
# A filemap that tries to write to files outside of
=== modified file 'lib/lp/code/configure.zcml'
--- lib/lp/code/configure.zcml 2010-09-13 04:56:29 +0000
+++ lib/lp/code/configure.zcml 2010-09-14 14:37:38 +0000
@@ -923,7 +923,7 @@
<require permission="launchpad.View" interface="lp.code.interfaces.sourcepackagerecipebuild.ISourcePackageRecipeBuild"/>
<!-- This is needed for UploadProcessor to run. The permission isn't
important; launchpad.Edit isn't actually held by anybody. -->
- <require permission="launchpad.Edit" set_attributes="status upload_log" />
+ <require permission="launchpad.Edit" set_attributes="status upload_log date_finished" />
</class>
<securedutility
@@ -988,10 +988,6 @@
name="RECIPEBRANCHBUILD"
provides="lp.buildmaster.interfaces.buildfarmjob.IBuildFarmJob"/>
- <call
- callable="lp.code.model.sourcepackagerecipebuild.register_archive_upload_policy_adapter"
- />
-
<webservice:register module="lp.code.interfaces.webservice" />
<adapter
provides="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJob"
=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
--- lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-09 17:02:33 +0000
+++ lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-14 14:37:38 +0000
@@ -22,7 +22,6 @@
)
from storm.store import Store
from zope.component import (
- getGlobalSiteManager,
getUtility,
)
from zope.interface import (
@@ -39,12 +38,6 @@
)
from canonical.launchpad.webapp import errorlog
from lp.app.errors import NotFoundError
-from lp.archiveuploader.uploadpolicy import (
- ArchiveUploadType,
- BuildDaemonUploadPolicy,
- IArchiveUploadPolicy,
- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
- )
from lp.buildmaster.enums import (
BuildFarmJobType,
BuildStatus,
@@ -77,25 +70,10 @@
from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
-class SourcePackageRecipeUploadPolicy(BuildDaemonUploadPolicy):
- """Policy for uploading the results of a source package recipe build."""
-
- name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME
- accepted_type = ArchiveUploadType.SOURCE_ONLY
-
- def getUploader(self, changes):
- """Return the person doing the upload."""
- build_id = int(getattr(self.options, 'buildid'))
- sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
- return sprb.requester
-
-
class SourcePackageRecipeBuild(PackageBuildDerived, Storm):
__storm_table__ = 'SourcePackageRecipeBuild'
- policy_name = SourcePackageRecipeUploadPolicy.name
-
implements(ISourcePackageRecipeBuild)
classProvides(ISourcePackageRecipeBuildSource)
@@ -333,6 +311,10 @@
if self.status == BuildStatus.FULLYBUILT:
self.notify()
+ def getUploader(self, changes):
+ """See `IPackageBuild`."""
+ return self.requester
+
class SourcePackageRecipeBuildJob(BuildFarmJobOldDerived, Storm):
classProvides(ISourcePackageRecipeBuildJobSource)
@@ -384,13 +366,6 @@
return 2505 + self.build.archive.relative_build_score
-def register_archive_upload_policy_adapter():
- getGlobalSiteManager().registerUtility(
- component=SourcePackageRecipeUploadPolicy,
- provided=IArchiveUploadPolicy,
- name=SourcePackageRecipeUploadPolicy.name)
-
-
def get_recipe_build_for_build_farm_job(build_farm_job):
"""Return the SourcePackageRecipeBuild associated with a BuildFarmJob."""
store = Store.of(build_farm_job)
=== modified file 'lib/lp/code/model/tests/test_sourcepackagerecipebuild.py'
--- lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-14 14:37:27 +0000
+++ lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-14 14:37:38 +0000
@@ -309,6 +309,12 @@
job = sprb.build_farm_job.getSpecificJob()
self.assertEqual(sprb, job)
+ def test_getUploader(self):
+ # For ACL purposes the uploader is the build requester.
+ build = self.makeSourcePackageRecipeBuild()
+ self.assertEquals(build.requester,
+ build.getUploader(None))
+
class TestAsBuildmaster(TestCaseWithFactory):
=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-08-04 00:16:44 +0000
+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-09-14 14:37:38 +0000
@@ -162,8 +162,7 @@
>>> buildd_policy = getPolicy(
... name='buildd',
... distro=failedtoupload_candidate.distribution.name,
- ... distroseries=failedtoupload_candidate.distro_series.name,
- ... buildid=failedtoupload_candidate.id)
+ ... distroseries=failedtoupload_candidate.distro_series.name)
>>> cdrkit_bin_upload = NascentUpload.from_changesfile_path(
... datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'),
@@ -171,7 +170,7 @@
>>> cdrkit_bin_upload.process()
>>> cdrkit_bin_upload.is_rejected
False
- >>> success = cdrkit_bin_upload.do_accept()
+ >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate)
>>> print cdrkit_bin_upload.queue_root.status.name
NEW
=== modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt'
--- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-14 14:37:27 +0000
+++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-14 14:37:38 +0000
@@ -339,8 +339,6 @@
>>> build.status.title
'Uploading build'
- >>> bqItem10.destroySelf()
-
=== Successfully collected and uploaded (FULLYBUILT) ===
Build item 6 has binary packages available in the sample data, letting us test
=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-08-24 15:29:01 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-09-14 14:37:38 +0000
@@ -74,15 +74,14 @@
... dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive)
>>> buildd_policy = getPolicy(
- ... name='buildd', distro='ubuntu', distroseries='dapper',
- ... buildid=build.id)
+ ... name='buildd', distro='ubuntu', distroseries='dapper')
>>> pmount_upload = NascentUpload.from_changesfile_path(
... datadir('pmount_0.9.7-2ubuntu2_amd64.changes'),
... buildd_policy, mock_logger)
DEBUG: Changes file can be unsigned.
- >>> pmount_upload.process()
+ >>> pmount_upload.process(build=build)
DEBUG: Beginning processing.
DEBUG: Verifying the changes file.
DEBUG: Verifying files in upload.
@@ -105,9 +104,8 @@
>>> print len(dapper_pmount.getLatestTranslationsUploads())
0
- >>> success = pmount_upload.do_accept()
+ >>> success = pmount_upload.do_accept(build=build)
DEBUG: Creating queue entry
- DEBUG: Build ... found
...
# And all things worked.
=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-08-30 02:07:38 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-09-14 14:37:38 +0000
@@ -119,21 +119,17 @@
>>> from lp.soyuz.scripts.soyuz_process_upload import (
... ProcessUpload)
>>> from canonical.testing import LaunchpadZopelessLayer
- >>> def process_uploads(upload_policy, build_id, series, loglevel):
+ >>> def process_uploads(upload_policy, series, loglevel):
... """Simulate process-upload.py script run.
...
... :param upload_policy: context in which to consider the upload
... (equivalent to script's --context option).
- ... :param build_id: build to which to attach this upload.
- ... (equivalent to script's --buildid option).
... :param series: distro series to give back from.
... (equivalent to script's --series option).
... :param loglevel: logging level (as defined in logging module). Any
... log messages below this level will be suppressed.
... """
... args = [temp_dir, "-C", upload_policy]
- ... if build_id is not None:
- ... args.extend(["-b", build_id])
... if series is not None:
... args.extend(["-s", series])
... # Run script under 'uploader' DB user. The dbuser argument to the
@@ -230,11 +226,11 @@
>>> from lp.services.mail import stub
>>> def simulate_upload(
- ... leafname, is_new=False, upload_policy='anything', build_id=None,
+ ... leafname, is_new=False, upload_policy='anything',
... series=None, distro="ubuntutest", loglevel=logging.WARN):
... """Process upload(s). Options are as for process_uploads()."""
... punt_upload_into_queue(leafname, distro=distro)
- ... process_uploads(upload_policy, build_id, series, loglevel)
+ ... process_uploads(upload_policy, series, loglevel)
... # We seem to be leaving a lock file behind here for some reason.
... # Naturally it doesn't count as an unprocessed incoming file, which
... # is what we're really looking for.
@@ -289,19 +285,6 @@
>>> simulate_upload('bar_1.0-2')
-Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id.
-
- >>> simulate_upload(
- ... 'bar_1.0-2_binary', upload_policy="buildd", build_id="2",
- ... loglevel=logging.ERROR)
- log> Exception while accepting:
- Attempt to upload binaries specifying build 2, where they don't fit.
- ...
- Rejected uploads: ['bar_1.0-2_binary']
-
-Try it again without the bogus build id. This succeeds without
-complaints.
-
>>> simulate_upload('bar_1.0-2_binary')
Check the rejection of a malicious version of bar package which refers
=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
--- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-09-14 14:37:38 +0000
@@ -61,11 +61,6 @@
help="Distro series to give back from.")
self.parser.add_option(
- "-b", "--buildid", action="store", type="int", dest="buildid",
- metavar="BUILD",
- help="The build ID to which to attach this upload.")
-
- self.parser.add_option(
"-a", "--announce", action="store", dest="announcelist",
metavar="ANNOUNCELIST", help="Override the announcement list")
@@ -82,10 +77,15 @@
"%s is not a directory" % self.options.base_fsroot)
self.logger.debug("Initialising connection.")
- def getPolicy(distro):
+ def getPolicy(distro, build):
self.options.distro = distro.name
policy = findPolicyByName(self.options.context)
policy.setOptions(self.options)
+ if self.options.builds:
+ assert build, "--builds specified but no build"
+ policy.distroseries = build.distro_series
+ policy.pocket = build.pocket
+ policy.archive = build.archive
return policy
processor = UploadProcessor(self.options.base_fsroot,
self.options.dryrun, self.options.nomails, self.options.builds,
=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-09 17:02:33 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-14 14:37:38 +0000
@@ -150,6 +150,15 @@
self.assertStatementCount(
0, self.build.getSpecificJob)
+ def test_getUploader(self):
+ # For ACL purposes the uploader is the changes file signer.
+
+ class MockChanges:
+ signer = "Somebody <somebody@xxxxxxxxxx>"
+
+ self.assertEquals("Somebody <somebody@xxxxxxxxxx>",
+ self.build.getUploader(MockChanges()))
+
class TestBuildUpdateDependencies(TestCaseWithFactory):
Follow ups