[Merge] lp:~jelmer/launchpad/no-more-buildid into lp:launchpad/devel

 

Jelmer Vernooij has proposed merging lp:~jelmer/launchpad/no-more-buildid into lp:launchpad/devel with lp:~jelmer/launchpad/archiveuploader-build-handling as a prerequisite.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers): code


This removes the buildid argument from archiveuploader (the already-approved branch at lp:~jelmer/launchpad/506526-remove-popen removes its last use) and instead passes build objects down the call chain explicitly.

tests: ./bin/test lp.archiveuploader
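
As a rough sketch of the calling convention this branch moves to (illustrative only; the names follow the diff below, and the surrounding setup is assumed):

  # Before: the build was looked up indirectly from the --buildid option.
  # self.options.buildid = build.id
  # processor.processChangesFile(upload_dir, changes_file)

  # After: the build object is passed down explicitly.
  result = processor.processChangesFile(upload_dir, changes_file, build=build)

  # Upload policies likewise receive the build when resolving the uploader.
  uploader = policy.getUploader(changes, build)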
-- 
https://code.launchpad.net/~jelmer/launchpad/no-more-buildid/+merge/35572
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jelmer/launchpad/no-more-buildid into lp:launchpad/devel.
=== modified file 'lib/lp/archiveuploader/dscfile.py'
--- lib/lp/archiveuploader/dscfile.py	2010-09-15 19:43:56 +0000
+++ lib/lp/archiveuploader/dscfile.py	2010-09-15 19:43:57 +0000
@@ -630,15 +630,6 @@
             cleanup_unpacked_dir(unpacked_dir)
         self.logger.debug("Done")
 
-    def findBuild(self):
-        """Find and return the SourcePackageRecipeBuild, if one is specified.
-        """
-        build_id = getattr(self.policy.options, 'buildid', None)
-        if build_id is None:
-            return None
-
-        return getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-
     def storeInDatabase(self, build):
         """Store DSC information as a SourcePackageRelease record.
 

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py	2010-09-15 19:43:56 +0000
+++ lib/lp/archiveuploader/nascentupload.py	2010-09-15 19:43:57 +0000
@@ -500,7 +500,7 @@
 
         # Set up some convenient shortcut variables.
 
-        uploader = self.policy.getUploader(self.changes)
+        uploader = self.policy.getUploader(self.changes, build)
         archive = self.policy.archive
 
         # If we have no signer, there's no ACL we can apply.
@@ -940,7 +940,6 @@
         sourcepackagerelease = None
         if self.sourceful:
             assert self.changes.dsc, "Sourceful upload lacks DSC."
-            build = self.changes.dsc.findBuild()
             if build is not None:
                 self.changes.dsc.checkBuild(build)
             sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
@@ -983,19 +982,21 @@
                     sourcepackagerelease = (
                         binary_package_file.findSourcePackageRelease())
 
+                # Find the build for this particular binary package file.
                 if build is None:
-                    build = binary_package_file.findBuild(
+                    bpf_build = binary_package_file.findBuild(
                         sourcepackagerelease)
-                if build.source_package_release != sourcepackagerelease:
+                else:
+                    bpf_build = build
+                if bpf_build.source_package_release != sourcepackagerelease:
                     raise AssertionError(
                         "Attempt to upload binaries specifying build %s, "
-                        "where they don't fit." % build.id)
-                binary_package_file.checkBuild(build)
-                assert self.queue_root.pocket == build.pocket, (
+                        "where they don't fit." % bpf_build.id)
+                binary_package_file.checkBuild(bpf_build)
+                assert self.queue_root.pocket == bpf_build.pocket, (
                     "Binary was not build for the claimed pocket.")
-                binary_package_file.storeInDatabase(build)
-                processed_builds.append(build)
-                build = None
+                binary_package_file.storeInDatabase(bpf_build)
+                processed_builds.append(bpf_build)
 
             # Store the related builds after verifying they were built
             # from the same source.

=== modified file 'lib/lp/archiveuploader/nascentuploadfile.py'
--- lib/lp/archiveuploader/nascentuploadfile.py	2010-09-15 19:43:56 +0000
+++ lib/lp/archiveuploader/nascentuploadfile.py	2010-09-15 19:43:57 +0000
@@ -53,7 +53,6 @@
     PackageUploadCustomFormat,
     PackageUploadStatus,
     )
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.section import ISectionSet
@@ -860,30 +859,22 @@
         - Create a new build in FULLYBUILT status.
 
         """
-        build_id = getattr(self.policy.options, 'buildid', None)
         dar = self.policy.distroseries[self.archtag]
 
-        if build_id is None:
-            # Check if there's a suitable existing build.
-            build = sourcepackagerelease.getBuildByArch(
-                dar, self.policy.archive)
-            if build is not None:
-                build.status = BuildStatus.FULLYBUILT
-                self.logger.debug("Updating build for %s: %s" % (
-                    dar.architecturetag, build.id))
-            else:
-                # No luck. Make one.
-                # Usually happen for security binary uploads.
-                build = sourcepackagerelease.createBuild(
-                    dar, self.policy.pocket, self.policy.archive,
-                    status=BuildStatus.FULLYBUILT)
-                self.logger.debug("Build %s created" % build.id)
+        # Check if there's a suitable existing build.
+        build = sourcepackagerelease.getBuildByArch(
+            dar, self.policy.archive)
+        if build is not None:
+            build.status = BuildStatus.FULLYBUILT
+            self.logger.debug("Updating build for %s: %s" % (
+                dar.architecturetag, build.id))
         else:
-            build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id)
-            self.logger.debug("Build %s found" % build.id)
-            # Ensure gathered binary is related to a FULLYBUILT build
-            # record. It will be check in slave-scanner procedure to
-            # certify that the build was processed correctly.
+            # No luck. Make one.
+            # Usually happen for security binary uploads.
+            build = sourcepackagerelease.createBuild(
+                dar, self.policy.pocket, self.policy.archive,
+                status=BuildStatus.FULLYBUILT)
+            self.logger.debug("Build %s created" % build.id)
         return build
 
     def checkBuild(self, build):

=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
--- lib/lp/archiveuploader/tests/__init__.py	2010-08-26 20:08:43 +0000
+++ lib/lp/archiveuploader/tests/__init__.py	2010-09-15 19:43:57 +0000
@@ -64,17 +64,15 @@
 class MockUploadOptions:
     """Mock upload policy options helper"""
 
-    def __init__(self, distro='ubuntutest', distroseries=None, buildid=None):
+    def __init__(self, distro='ubuntutest', distroseries=None):
         self.distro = distro
         self.distroseries = distroseries
-        self.buildid = buildid
-
-
-def getPolicy(name='anything', distro='ubuntu', distroseries=None,
-              buildid=None):
+
+
+def getPolicy(name='anything', distro='ubuntu', distroseries=None):
     """Build and return an Upload Policy for the given context."""
     policy = findPolicyByName(name)
-    options = MockUploadOptions(distro, distroseries, buildid)
+    options = MockUploadOptions(distro, distroseries)
     policy.setOptions(options)
     return policy
 

=== modified file 'lib/lp/archiveuploader/tests/nascentupload.txt'
--- lib/lp/archiveuploader/tests/nascentupload.txt	2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/nascentupload.txt	2010-09-15 19:43:57 +0000
@@ -27,7 +27,7 @@
   ...    datadir, getPolicy, mock_logger, mock_logger_quiet)
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> sync_policy = getPolicy(
   ...     name='sync', distro='ubuntu', distroseries='hoary')
@@ -216,7 +216,7 @@
   # Use the buildd policy as it accepts unsigned changes files and binary
   # uploads.
   >>> modified_buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> ed_mismatched_upload = NascentUpload.from_changesfile_path(
   ...     datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"),
@@ -640,13 +640,12 @@
 the 'buildd' upload policy and the build record id.
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary',
-  ...     buildid=multibar_build.id)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> multibar_bin_upload = NascentUpload.from_changesfile_path(
   ...     datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'),
   ...     buildd_policy, mock_logger_quiet)
-  >>> multibar_bin_upload.process()
+  >>> multibar_bin_upload.process(build=multibar_build)
   >>> success = multibar_bin_upload.do_accept()
 
 Now that we have successfully processed the binaries coming from a

=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py	2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py	2010-09-15 19:43:57 +0000
@@ -112,7 +112,7 @@
         # Store source queue item for future use.
         self.source_queue = queue_item
 
-    def _uploadBinary(self, archtag):
+    def _uploadBinary(self, archtag, build):
         """Upload the base binary.
 
         Ensure it got processed and has a respective queue record.
@@ -121,7 +121,7 @@
         self._prepareUpload(self.binary_dir)
         self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", self.binary_dir),
-            self.getBinaryChangesfileFor(archtag))
+            self.getBinaryChangesfileFor(archtag), build=build)
         queue_item = self.uploadprocessor.last_processed_upload.queue_root
         self.assertTrue(
             queue_item is not None,
@@ -205,10 +205,9 @@
         pubrec.datepublished = UTC_NOW
         queue_item.setDone()
 
-    def _setupUploadProcessorForBuild(self, build_candidate):
+    def _setupUploadProcessorForBuild(self):
         """Setup an UploadProcessor instance for a given buildd context."""
         self.options.context = self.policy
-        self.options.buildid = str(build_candidate.id)
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
 
@@ -223,8 +222,8 @@
         """
         # Upload i386 binary.
         build_candidate = self._createBuild('i386')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('i386')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('i386', build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(1)
@@ -239,8 +238,8 @@
 
         # Upload powerpc binary
         build_candidate = self._createBuild('powerpc')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('powerpc')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('powerpc', build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(2)

=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py	2010-08-31 11:11:09 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py	2010-09-15 19:43:57 +0000
@@ -355,10 +355,10 @@
         builds = self.name16.archive.getBuildRecords(name="bar")
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary_universe", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # No mails are sent for successful binary uploads.
         self.assertEqual(len(stub.test_emails), 0,
@@ -405,9 +405,9 @@
 
         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -459,9 +459,9 @@
 
         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build_bar_i386.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build_bar_i386)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -760,9 +760,9 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -804,10 +804,9 @@
         # Binary uploads should exhibit the same behaviour:
         [build] = self.name16.archive.getBuildRecords(name="bar")
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_contrib_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(self.uploadprocessor, upload_dir, build=build)
         queue_items = self.breezy.getQueueItems(
             status=PackageUploadStatus.ACCEPTED, name="bar",
             version="1.0-1", exact_match=True, archive=self.name16.archive)
@@ -1306,14 +1305,14 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
 
         # Stuff 1024 MiB in name16 PPA, so anything will be above the
         # default quota limit, 1024 MiB.
         self._fillArchive(self.name16.archive, 1024 * (2 ** 20))
 
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted, and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-08-27 11:19:54 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-09-15 19:43:57 +0000
@@ -10,6 +10,9 @@
 from storm.store import Store
 from zope.component import getUtility
 
+from lp.archiveuploader.uploadprocessor import (
+    UploadStatusEnum,
+    )
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
     )
@@ -17,7 +20,6 @@
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
     )
-from lp.soyuz.enums import PackageUploadStatus
 
 
 class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
@@ -41,7 +43,6 @@
 
         Store.of(self.build).flush()
         self.options.context = 'recipe'
-        self.options.buildid = self.build.id
 
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
@@ -54,19 +55,14 @@
         self.assertIs(None, self.build.source_package_release)
         self.assertEqual(False, self.build.verifySuccessfulUpload())
         self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
-        self.uploadprocessor.processChangesFile(
+        result = self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
-            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
+            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
+            build=self.build)
         self.layer.txn.commit()
 
-        queue_item = self.uploadprocessor.last_processed_upload.queue_root
-        self.assertTrue(
-            queue_item is not None,
+        self.assertEquals(UploadStatusEnum.ACCEPTED, result,
             "Source upload failed\nGot: %s" % "\n".join(self.log.lines))
 
-        self.assertEqual(PackageUploadStatus.DONE, queue_item.status)
-        spr = queue_item.sources[0].sourcepackagerelease
-        self.assertEqual(self.build, spr.source_package_recipe_build)
-        self.assertEqual(spr, self.build.source_package_release)
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertEqual(True, self.build.verifySuccessfulUpload())

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-02 16:28:50 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-15 19:43:57 +0000
@@ -149,7 +149,7 @@
 
         self.options = MockOptions()
         self.options.base_fsroot = self.queue_folder
-        self.options.builds = True
+        self.options.builds = False
         self.options.leafname = None
         self.options.distro = "ubuntu"
         self.options.distroseries = None
@@ -168,9 +168,13 @@
         super(TestUploadProcessorBase, self).tearDown()
 
     def getUploadProcessor(self, txn):
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
+            if self.options.builds:
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             policy.setOptions(self.options)
             return policy
         return UploadProcessor(
@@ -284,7 +288,7 @@
         shutil.copytree(upload_dir, target_path)
         return os.path.join(self.incoming_folder, queue_entry)
 
-    def processUpload(self, processor, upload_dir):
+    def processUpload(self, processor, upload_dir, build=None):
         """Process an upload queue entry directory.
 
         There is some duplication here with logic in UploadProcessor,
@@ -294,7 +298,8 @@
         results = []
         changes_files = processor.locateChangesFiles(upload_dir)
         for changes_file in changes_files:
-            result = processor.processChangesFile(upload_dir, changes_file)
+            result = processor.processChangesFile(
+                upload_dir, changes_file, build=build)
             results.append(result)
         return results
 
@@ -689,10 +694,10 @@
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         self.layer.txn.commit()
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         self.assertEqual(
             uploadprocessor.last_processed_upload.is_rejected, False)
         bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
@@ -720,10 +725,10 @@
 
         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+             build=bar_copied_build)
 
         # Make sure the upload succeeded.
         self.assertEqual(
@@ -792,9 +797,9 @@
         [bar_original_build] = bar_source_pub.createMissingBuilds()
 
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
 
         # Prepare ubuntu/breezy-autotest to build sources in i386.
@@ -814,10 +819,10 @@
         # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
         # in breezy-autotest context.
         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         self.options.distroseries = breezy_autotest.name
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)
         [duplicated_binary_upload] = breezy_autotest.getQueueItems(
             status=PackageUploadStatus.NEW, name='bar',
             version='1.0-1', exact_match=True)
@@ -855,9 +860,9 @@
         [bar_original_build] = bar_source_pub.getBuilds()
 
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-2_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
 
         # Create a COPY archive for building in non-virtual builds.
@@ -874,10 +879,10 @@
         [bar_copied_build] = bar_copied_source.createMissingBuilds()
 
         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)
 
         # The binary just uploaded is accepted because it's destined for a
         # copy archive and the PRIMARY and the COPY archives are isolated
@@ -1030,9 +1035,9 @@
             self.breezy['i386'], PackagePublishingPocket.RELEASE,
             self.ubuntu.main_archive)
         self.layer.txn.commit()
-        self.options.buildid = foocomm_build.id
         upload_dir = self.queueUpload("foocomm_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=foocomm_build)
 
         contents = [
             "Subject: foocomm_1.0-1_i386.changes rejected",
@@ -1040,10 +1045,8 @@
             "where they don't fit."]
         self.assertEmail(contents)
 
-        # Reset upload queue directory for a new upload and the
-        # uploadprocessor buildid option.
+        # Reset upload queue directory for a new upload.
         shutil.rmtree(upload_dir)
-        self.options.buildid = None
 
         # Now upload a binary package of 'foocomm', letting a new build record
         # with appropriate data be created by the uploadprocessor.

=== modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt'
--- lib/lp/archiveuploader/tests/uploadpolicy.txt	2010-08-18 14:03:15 +0000
+++ lib/lp/archiveuploader/tests/uploadpolicy.txt	2010-09-15 19:43:57 +0000
@@ -53,23 +53,16 @@
   ...     distro = 'ubuntu'
   ...     distroseries = None
   >>> class MockOptions(MockAbstractOptions):
-  ...     buildid = 1
+  ...     builds = True
 
   >>> ab_opts = MockAbstractOptions()
   >>> bd_opts = MockOptions()
 
   >>> insecure_policy.setOptions(ab_opts)
-  >>> insecure_policy.options is ab_opts
-  True
   >>> insecure_policy.distro.name
   u'ubuntu'
   >>> buildd_policy.setOptions(ab_opts)
-  Traceback (most recent call last):
-  ...
-  UploadPolicyError: BuildID required for buildd context
   >>> buildd_policy.setOptions(bd_opts)
-  >>> buildd_policy.options is bd_opts
-  True
   >>> buildd_policy.distro.name
   u'ubuntu'
 

=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
--- lib/lp/archiveuploader/uploadpolicy.py	2010-08-25 13:04:14 +0000
+++ lib/lp/archiveuploader/uploadpolicy.py	2010-09-15 19:43:57 +0000
@@ -128,7 +128,7 @@
             raise AssertionError(
                 "Upload is not sourceful, binaryful or mixed.")
 
-    def getUploader(self, changes):
+    def getUploader(self, changes, build):
         """Get the person who is doing the uploading."""
         return changes.signer
 
@@ -334,10 +334,7 @@
 
     def setOptions(self, options):
         AbstractUploadPolicy.setOptions(self, options)
-        # We require a buildid to be provided
-        if (getattr(options, 'buildid', None) is None and
-            not getattr(options, 'builds', False)):
-            raise UploadPolicyError("BuildID required for buildd context")
+        self.builds = True
 
     def policySpecificChecks(self, upload):
         """The buildd policy should enforce that the buildid matches."""

=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py	2010-09-15 19:43:56 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py	2010-09-15 19:43:57 +0000
@@ -417,7 +417,7 @@
                          "https://help.launchpad.net/Packaging/PPA#Uploading "
                          "and update your configuration.")))
         logger.debug("Finding fresh policy")
-        policy = self._getPolicyForDistro(distribution)
+        policy = self._getPolicyForDistro(distribution, build)
         policy.archive = archive
 
         # DistroSeries overriding respect the following precedence:

=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
--- lib/lp/code/model/sourcepackagerecipebuild.py	2010-09-09 17:02:33 +0000
+++ lib/lp/code/model/sourcepackagerecipebuild.py	2010-09-15 19:43:57 +0000
@@ -83,10 +83,8 @@
     name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME
     accepted_type = ArchiveUploadType.SOURCE_ONLY
 
-    def getUploader(self, changes):
+    def getUploader(self, changes, sprb):
         """Return the person doing the upload."""
-        build_id = int(getattr(self.options, 'buildid'))
-        sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
         return sprb.requester
 
 

=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2010-08-04 00:16:44 +0000
+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2010-09-15 19:43:57 +0000
@@ -162,8 +162,7 @@
   >>> buildd_policy = getPolicy(
   ...     name='buildd',
   ...     distro=failedtoupload_candidate.distribution.name,
-  ...     distroseries=failedtoupload_candidate.distro_series.name,
-  ...     buildid=failedtoupload_candidate.id)
+  ...     distroseries=failedtoupload_candidate.distro_series.name)
 
   >>> cdrkit_bin_upload = NascentUpload.from_changesfile_path(
   ...     datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'),
@@ -171,7 +170,7 @@
   >>> cdrkit_bin_upload.process()
   >>> cdrkit_bin_upload.is_rejected
   False
-  >>> success = cdrkit_bin_upload.do_accept()
+  >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate)
   >>> print cdrkit_bin_upload.queue_root.status.name
   NEW
 

=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2010-08-24 15:29:01 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2010-09-15 19:43:57 +0000
@@ -74,15 +74,14 @@
   ...      dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive)
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='dapper',
-  ...     buildid=build.id)
+  ...     name='buildd', distro='ubuntu', distroseries='dapper')
 
   >>> pmount_upload = NascentUpload.from_changesfile_path(
   ...     datadir('pmount_0.9.7-2ubuntu2_amd64.changes'),
   ...     buildd_policy, mock_logger)
   DEBUG: Changes file can be unsigned.
 
-  >>> pmount_upload.process()
+  >>> pmount_upload.process(build=build)
   DEBUG: Beginning processing.
   DEBUG: Verifying the changes file.
   DEBUG: Verifying files in upload.
@@ -105,9 +104,8 @@
   >>> print len(dapper_pmount.getLatestTranslationsUploads())
   0
 
-  >>> success = pmount_upload.do_accept()
+  >>> success = pmount_upload.do_accept(build=build)
   DEBUG: Creating queue entry
-  DEBUG: Build ... found
   ...
 
   # And all things worked.

=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2010-08-30 02:07:38 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2010-09-15 19:43:57 +0000
@@ -119,21 +119,17 @@
   >>> from lp.soyuz.scripts.soyuz_process_upload import (
   ...     ProcessUpload)
   >>> from canonical.testing import LaunchpadZopelessLayer
-  >>> def process_uploads(upload_policy, build_id, series, loglevel):
+  >>> def process_uploads(upload_policy, series, loglevel):
   ...     """Simulate process-upload.py script run.
   ...
   ...     :param upload_policy: context in which to consider the upload
   ...         (equivalent to script's --context option).
-  ...     :param build_id: build to which to attach this upload.
-  ...         (equivalent to script's --buildid option).
   ...     :param series: distro series to give back from.
   ...         (equivalent to script's --series option).
   ...     :param loglevel: logging level (as defined in logging module).  Any
   ...         log messages below this level will be suppressed.
   ...     """
   ...     args = [temp_dir, "-C", upload_policy]
-  ...     if build_id is not None:
-  ...         args.extend(["-b", build_id])
   ...     if series is not None:
   ...         args.extend(["-s", series])
   ...     # Run script under 'uploader' DB user.  The dbuser argument to the
@@ -230,11 +226,11 @@
   >>> from lp.services.mail import stub
 
   >>> def simulate_upload(
-  ...     leafname, is_new=False, upload_policy='anything', build_id=None,
+  ...     leafname, is_new=False, upload_policy='anything',
   ...     series=None, distro="ubuntutest", loglevel=logging.WARN):
   ...     """Process upload(s).  Options are as for process_uploads()."""
   ...     punt_upload_into_queue(leafname, distro=distro)
-  ...     process_uploads(upload_policy, build_id, series, loglevel)
+  ...     process_uploads(upload_policy, series, loglevel)
   ...     # We seem to be leaving a lock file behind here for some reason.
   ...     # Naturally it doesn't count as an unprocessed incoming file, which
   ...     # is what we're really looking for.
@@ -289,19 +285,6 @@
 
   >>> simulate_upload('bar_1.0-2')
 
-Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id.
-
-  >>> simulate_upload(
-  ...     'bar_1.0-2_binary', upload_policy="buildd", build_id="2",
-  ...     loglevel=logging.ERROR)
-  log> Exception while accepting:
-  Attempt to upload binaries specifying build 2, where they don't fit.
-  ...
-  Rejected uploads: ['bar_1.0-2_binary']
-
-Try it again without the bogus build id.  This succeeds without
-complaints.
-
   >>> simulate_upload('bar_1.0-2_binary')
 
 Check the rejection of a malicious version of bar package which refers

=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
--- lib/lp/soyuz/scripts/soyuz_process_upload.py	2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/scripts/soyuz_process_upload.py	2010-09-15 19:43:57 +0000
@@ -61,11 +61,6 @@
             help="Distro series to give back from.")
 
         self.parser.add_option(
-            "-b", "--buildid", action="store", type="int", dest="buildid",
-            metavar="BUILD",
-            help="The build ID to which to attach this upload.")
-
-        self.parser.add_option(
             "-a", "--announce", action="store", dest="announcelist",
             metavar="ANNOUNCELIST", help="Override the announcement list")
 
@@ -82,10 +77,15 @@
                 "%s is not a directory" % self.options.base_fsroot)
 
         self.logger.debug("Initialising connection.")
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
             policy.setOptions(self.options)
+            if self.options.builds:
+                assert build, "--builds specified but no build"
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             return policy
         processor = UploadProcessor(self.options.base_fsroot,
             self.options.dryrun, self.options.nomails, self.options.builds,
