← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~leonardr/launchpad/revert-oauth-aware-website into lp:launchpad

 

Leonard Richardson has proposed merging lp:~leonardr/launchpad/revert-oauth-aware-website into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)


This branch reverts my recent branch to make parts of the Launchpad website accept OAuth-signed requests. I'm reverting it not because the code is bad, but because the requirements changed immediately after I merged this branch, rendering it moot.
-- 
https://code.launchpad.net/~leonardr/launchpad/revert-oauth-aware-website/+merge/36038
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~leonardr/launchpad/revert-oauth-aware-website into lp:launchpad.
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg	2010-09-19 22:32:31 +0000
+++ database/schema/security.cfg	2010-09-20 17:09:57 +0000
@@ -1131,6 +1131,8 @@
 public.packagebuild                     = SELECT, INSERT, UPDATE
 public.binarypackagebuild               = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild         = SELECT, UPDATE
+public.sourcepackagerecipebuildjob      = SELECT, UPDATE
+public.sourcepackagerecipe              = SELECT, UPDATE
 public.buildqueue                       = SELECT, INSERT, UPDATE
 public.job                              = SELECT, INSERT, UPDATE
 public.buildpackagejob                  = SELECT, INSERT, UPDATE

=== modified file 'lib/canonical/launchpad/icing/style-3-0.css.in'
--- lib/canonical/launchpad/icing/style-3-0.css.in	2010-08-27 19:20:53 +0000
+++ lib/canonical/launchpad/icing/style-3-0.css.in	2010-09-20 17:09:57 +0000
@@ -1804,6 +1804,20 @@
     color: #b8b8ff;
     }
 
+/* Inline description editing.
+ * Override the lazr-js style for all uses.
+ */
+div#edit-description,
+div#edit-commit_message {
+    font-family: 'UbuntuBeta Mono', 'Ubuntu Mono', monospace;
+    font-size: 93%;
+    margin: 1em 0;
+    }
+div#edit-description .yui-ieditor-input,
+div#edit-commit_message .yui-ieditor-input {
+    top: 0;
+    }
+
 
 /* =========================
    Universal presentation

=== modified file 'lib/lp/archiveuploader/dscfile.py'
--- lib/lp/archiveuploader/dscfile.py	2010-09-09 17:02:33 +0000
+++ lib/lp/archiveuploader/dscfile.py	2010-09-20 17:09:57 +0000
@@ -630,35 +630,6 @@
             cleanup_unpacked_dir(unpacked_dir)
         self.logger.debug("Done")
 
-    def findBuild(self):
-        """Find and return the SourcePackageRecipeBuild, if one is specified.
-
-        If by any chance an inconsistent build was found this method will
-        raise UploadError resulting in a upload rejection.
-        """
-        build_id = getattr(self.policy.options, 'buildid', None)
-        if build_id is None:
-            return None
-
-        build = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-
-        # The master verifies the status to confirm successful upload.
-        build.status = BuildStatus.FULLYBUILT
-        # If this upload is successful, any existing log is wrong and
-        # unuseful.
-        build.upload_log = None
-
-        # Sanity check; raise an error if the build we've been
-        # told to link to makes no sense.
-        if (build.pocket != self.policy.pocket or
-            build.distroseries != self.policy.distroseries or
-            build.archive != self.policy.archive):
-            raise UploadError(
-                "Attempt to upload source specifying "
-                "recipe build %s, where it doesn't fit." % build.id)
-
-        return build
-
     def storeInDatabase(self, build):
         """Store DSC information as a SourcePackageRelease record.
 

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py	2010-08-27 14:27:22 +0000
+++ lib/lp/archiveuploader/nascentupload.py	2010-09-20 17:09:57 +0000
@@ -137,7 +137,7 @@
             raise FatalUploadError(str(e))
         return cls(changesfile, policy, logger)
 
-    def process(self):
+    def process(self, build=None):
         """Process this upload, checking it against policy, loading it into
         the database if it seems okay.
 
@@ -200,7 +200,7 @@
         self.overrideArchive()
 
         # Check upload rights for the signer of the upload.
-        self.verify_acl()
+        self.verify_acl(build)
 
         # Perform policy checks.
         policy.checkUpload(self)
@@ -483,7 +483,7 @@
     #
     # Signature and ACL stuff
     #
-    def verify_acl(self):
+    def verify_acl(self, build=None):
         """Check the signer's upload rights.
 
         The signer must have permission to upload to either the component
@@ -498,10 +498,13 @@
         if self.binaryful:
             return
 
-        # Set up some convenient shortcut variables.
-
-        uploader = self.policy.getUploader(self.changes)
-        archive = self.policy.archive
+        # The build can have an explicit uploader, which may be different
+        # from the changes file signer. (i.e in case of daily source package
+        # builds)
+        if build is not None:
+            uploader = build.getUploader(self.changes)
+        else:
+            uploader = self.changes.signer
 
         # If we have no signer, there's no ACL we can apply.
         if uploader is None:
@@ -511,7 +514,7 @@
         source_name = getUtility(
             ISourcePackageNameSet).queryByName(self.changes.dsc.package)
 
-        rejection_reason = archive.checkUpload(
+        rejection_reason = self.policy.archive.checkUpload(
             uploader, self.policy.distroseries, source_name,
             self.changes.dsc.component, self.policy.pocket, not self.is_new)
 
@@ -824,7 +827,7 @@
     #
     # Actually processing accepted or rejected uploads -- and mailing people
     #
-    def do_accept(self, notify=True):
+    def do_accept(self, notify=True, build=None):
         """Accept the upload into the queue.
 
         This *MAY* in extreme cases cause a database error and thus
@@ -834,13 +837,14 @@
         constraint.
 
         :param notify: True to send an email, False to not send one.
+        :param build: The build associated with this upload.
         """
         if self.is_rejected:
             self.reject("Alas, someone called do_accept when we're rejected")
             self.do_reject(notify)
             return False
         try:
-            self.storeObjectsInDatabase()
+            self.storeObjectsInDatabase(build=build)
 
             # Send the email.
             # There is also a small corner case here where the DB transaction
@@ -923,7 +927,7 @@
     #
     # Inserting stuff in the database
     #
-    def storeObjectsInDatabase(self):
+    def storeObjectsInDatabase(self, build=None):
         """Insert this nascent upload into the database."""
 
         # Queue entries are created in the NEW state by default; at the
@@ -939,7 +943,8 @@
         sourcepackagerelease = None
         if self.sourceful:
             assert self.changes.dsc, "Sourceful upload lacks DSC."
-            build = self.changes.dsc.findBuild()
+            if build is not None:
+                self.changes.dsc.checkBuild(build)
             sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
             package_upload_source = self.queue_root.addSource(
                 sourcepackagerelease)
@@ -980,11 +985,21 @@
                     sourcepackagerelease = (
                         binary_package_file.findSourcePackageRelease())
 
-                build = binary_package_file.findBuild(sourcepackagerelease)
-                assert self.queue_root.pocket == build.pocket, (
+                # Find the build for this particular binary package file.
+                if build is None:
+                    bpf_build = binary_package_file.findBuild(
+                        sourcepackagerelease)
+                else:
+                    bpf_build = build
+                if bpf_build.source_package_release != sourcepackagerelease:
+                    raise AssertionError(
+                        "Attempt to upload binaries specifying build %s, "
+                        "where they don't fit." % bpf_build.id)
+                binary_package_file.checkBuild(bpf_build)
+                assert self.queue_root.pocket == bpf_build.pocket, (
                     "Binary was not build for the claimed pocket.")
-                binary_package_file.storeInDatabase(build)
-                processed_builds.append(build)
+                binary_package_file.storeInDatabase(bpf_build)
+                processed_builds.append(bpf_build)
 
             # Store the related builds after verifying they were built
             # from the same source.

=== modified file 'lib/lp/archiveuploader/nascentuploadfile.py'
--- lib/lp/archiveuploader/nascentuploadfile.py	2010-09-02 16:28:50 +0000
+++ lib/lp/archiveuploader/nascentuploadfile.py	2010-09-20 17:09:57 +0000
@@ -33,6 +33,7 @@
 from canonical.encoding import guess as guess_encoding
 from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
 from canonical.librarian.utils import filechunks
+from lp.app.errors import NotFoundError
 from lp.archiveuploader.utils import (
     determine_source_file_type,
     prefix_multi_line_string,
@@ -52,7 +53,6 @@
     PackageUploadCustomFormat,
     PackageUploadStatus,
     )
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.section import ISectionSet
@@ -338,6 +338,13 @@
         """Return an ISection for self.section_name."""
         return getUtility(ISectionSet)[self.section_name]
 
+    def checkBuild(self, build):
+        """Check the status of the build this file is part of.
+
+        :param build: an `IPackageBuild` instance
+        """
+        raise NotImplementedError(self.checkBuild)
+
     def extractUserDefinedFields(self, control):
         """Extract the user defined fields out of a control file list.
         """
@@ -381,6 +388,23 @@
             yield UploadError("%s: should be %s according to changes file."
                 % (filename_version, version_chopped))
 
+    def checkBuild(self, build):
+        """See PackageUploadFile."""
+        # The master verifies the status to confirm successful upload.
+        build.status = BuildStatus.FULLYBUILT
+        # If this upload is successful, any existing log is wrong and
+        # unuseful.
+        build.upload_log = None
+
+        # Sanity check; raise an error if the build we've been
+        # told to link to makes no sense.
+        if (build.pocket != self.policy.pocket or
+            build.distroseries != self.policy.distroseries or
+            build.archive != self.policy.archive):
+            raise UploadError(
+                "Attempt to upload source specifying "
+                "recipe build %s, where it doesn't fit." % build.id)
+
 
 class BaseBinaryUploadFile(PackageUploadFile):
     """Base methods for binary upload modeling."""
@@ -834,52 +858,52 @@
         in this case, change this build to be FULLYBUILT.
         - Create a new build in FULLYBUILT status.
 
-        If by any chance an inconsistent build was found this method will
-        raise UploadError resulting in a upload rejection.
         """
-        build_id = getattr(self.policy.options, 'buildid', None)
         dar = self.policy.distroseries[self.archtag]
 
-        if build_id is None:
-            # Check if there's a suitable existing build.
-            build = sourcepackagerelease.getBuildByArch(
-                dar, self.policy.archive)
-            if build is not None:
-                build.status = BuildStatus.FULLYBUILT
-                self.logger.debug("Updating build for %s: %s" % (
-                    dar.architecturetag, build.id))
-            else:
-                # No luck. Make one.
-                # Usually happen for security binary uploads.
-                build = sourcepackagerelease.createBuild(
-                    dar, self.policy.pocket, self.policy.archive,
-                    status=BuildStatus.FULLYBUILT)
-                self.logger.debug("Build %s created" % build.id)
-        else:
-            build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id)
-            self.logger.debug("Build %s found" % build.id)
-            # Ensure gathered binary is related to a FULLYBUILT build
-            # record. It will be check in slave-scanner procedure to
-            # certify that the build was processed correctly.
+        # Check if there's a suitable existing build.
+        build = sourcepackagerelease.getBuildByArch(
+            dar, self.policy.archive)
+        if build is not None:
             build.status = BuildStatus.FULLYBUILT
-            # Also purge any previous failed upload_log stored, so its
-            # content can be garbage-collected since it's not useful
-            # anymore.
-            build.upload_log = None
+            self.logger.debug("Updating build for %s: %s" % (
+                dar.architecturetag, build.id))
+        else:
+            # No luck. Make one.
+            # Usually happen for security binary uploads.
+            build = sourcepackagerelease.createBuild(
+                dar, self.policy.pocket, self.policy.archive,
+                status=BuildStatus.FULLYBUILT)
+            self.logger.debug("Build %s created" % build.id)
+        return build
+
+    def checkBuild(self, build):
+        """See PackageUploadFile."""
+        try:
+            dar = self.policy.distroseries[self.archtag]
+        except NotFoundError:
+            raise UploadError(
+                "Upload to unknown architecture %s for distroseries %s" %
+                (self.archtag, self.policy.distroseries))
+
+        # Ensure gathered binary is related to a FULLYBUILT build
+        # record. It will be check in slave-scanner procedure to
+        # certify that the build was processed correctly.
+        build.status = BuildStatus.FULLYBUILT
+        # Also purge any previous failed upload_log stored, so its
+        # content can be garbage-collected since it's not useful
+        # anymore.
+        build.upload_log = None
 
         # Sanity check; raise an error if the build we've been
-        # told to link to makes no sense (ie. is not for the right
-        # source package).
-        if (build.source_package_release != sourcepackagerelease or
-            build.pocket != self.policy.pocket or
+        # told to link to makes no sense.
+        if (build.pocket != self.policy.pocket or
             build.distro_arch_series != dar or
             build.archive != self.policy.archive):
             raise UploadError(
                 "Attempt to upload binaries specifying "
                 "build %s, where they don't fit." % build.id)
 
-        return build
-
     def storeInDatabase(self, build):
         """Insert this binary release and build into the database."""
         # Reencode everything we are supplying, because old packages

=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
--- lib/lp/archiveuploader/tests/__init__.py	2010-09-16 00:33:37 +0000
+++ lib/lp/archiveuploader/tests/__init__.py	2010-09-20 17:09:57 +0000
@@ -64,17 +64,15 @@
 class MockUploadOptions:
     """Mock upload policy options helper"""
 
-    def __init__(self, distro='ubuntutest', distroseries=None, buildid=None):
+    def __init__(self, distro='ubuntutest', distroseries=None):
         self.distro = distro
         self.distroseries = distroseries
-        self.buildid = buildid
-
-
-def getPolicy(name='anything', distro='ubuntu', distroseries=None,
-              buildid=None):
+
+
+def getPolicy(name='anything', distro='ubuntu', distroseries=None):
     """Build and return an Upload Policy for the given context."""
     policy = findPolicyByName(name)
-    options = MockUploadOptions(distro, distroseries, buildid)
+    options = MockUploadOptions(distro, distroseries)
     policy.setOptions(options)
     return policy
 

=== modified file 'lib/lp/archiveuploader/tests/nascentupload.txt'
--- lib/lp/archiveuploader/tests/nascentupload.txt	2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/nascentupload.txt	2010-09-20 17:09:57 +0000
@@ -27,7 +27,7 @@
   ...    datadir, getPolicy, mock_logger, mock_logger_quiet)
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> sync_policy = getPolicy(
   ...     name='sync', distro='ubuntu', distroseries='hoary')
@@ -216,7 +216,7 @@
   # Use the buildd policy as it accepts unsigned changes files and binary
   # uploads.
   >>> modified_buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> ed_mismatched_upload = NascentUpload.from_changesfile_path(
   ...     datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"),
@@ -640,13 +640,12 @@
 the 'buildd' upload policy and the build record id.
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='hoary',
-  ...     buildid=multibar_build.id)
+  ...     name='buildd', distro='ubuntu', distroseries='hoary')
 
   >>> multibar_bin_upload = NascentUpload.from_changesfile_path(
   ...     datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'),
   ...     buildd_policy, mock_logger_quiet)
-  >>> multibar_bin_upload.process()
+  >>> multibar_bin_upload.process(build=multibar_build)
   >>> success = multibar_bin_upload.do_accept()
 
 Now that we have successfully processed the binaries coming from a

=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py	2010-08-27 10:06:26 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py	2010-09-20 17:09:57 +0000
@@ -112,7 +112,7 @@
         # Store source queue item for future use.
         self.source_queue = queue_item
 
-    def _uploadBinary(self, archtag):
+    def _uploadBinary(self, archtag, build):
         """Upload the base binary.
 
         Ensure it got processed and has a respective queue record.
@@ -121,7 +121,7 @@
         self._prepareUpload(self.binary_dir)
         self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", self.binary_dir),
-            self.getBinaryChangesfileFor(archtag))
+            self.getBinaryChangesfileFor(archtag), build=build)
         queue_item = self.uploadprocessor.last_processed_upload.queue_root
         self.assertTrue(
             queue_item is not None,
@@ -205,10 +205,9 @@
         pubrec.datepublished = UTC_NOW
         queue_item.setDone()
 
-    def _setupUploadProcessorForBuild(self, build_candidate):
+    def _setupUploadProcessorForBuild(self):
         """Setup an UploadProcessor instance for a given buildd context."""
         self.options.context = self.policy
-        self.options.buildid = str(build_candidate.id)
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
 
@@ -223,8 +222,8 @@
         """
         # Upload i386 binary.
         build_candidate = self._createBuild('i386')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('i386')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('i386', build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(1)
@@ -239,8 +238,8 @@
 
         # Upload powerpc binary
         build_candidate = self._createBuild('powerpc')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('powerpc')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('powerpc', build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(2)

=== modified file 'lib/lp/archiveuploader/tests/test_nascentuploadfile.py'
--- lib/lp/archiveuploader/tests/test_nascentuploadfile.py	2010-09-03 06:06:40 +0000
+++ lib/lp/archiveuploader/tests/test_nascentuploadfile.py	2010-09-20 17:09:57 +0000
@@ -20,8 +20,11 @@
 from lp.archiveuploader.nascentuploadfile import (
     CustomUploadFile,
     DebBinaryUploadFile,
+    UploadError,
     )
+from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.archiveuploader.tests import AbsolutelyAnythingGoesUploadPolicy
+from lp.buildmaster.enums import BuildStatus
 from lp.soyuz.enums import PackageUploadCustomFormat
 from lp.testing import TestCaseWithFactory
 
@@ -34,6 +37,7 @@
         self.logger = BufferLogger()
         self.policy = AbsolutelyAnythingGoesUploadPolicy()
         self.distro = self.factory.makeDistribution()
+        self.policy.pocket = PackagePublishingPocket.RELEASE
         self.policy.archive = self.factory.makeArchive(
             distribution=self.distro)
 
@@ -217,6 +221,34 @@
         release = uploadfile.storeInDatabase(None)
         self.assertEquals(u"http://samba.org/~jelmer/bzr", release.homepage)
 
+    def test_checkBuild(self):
+        # checkBuild() verifies consistency with a build.
+        build = self.factory.makeSourcePackageRecipeBuild(
+            pocket=self.policy.pocket, distroseries=self.policy.distroseries,
+            archive=self.policy.archive)
+        dsc = self.getBaseDsc()
+        uploadfile = self.createDSCFile(
+            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()))
+        uploadfile.checkBuild(build)
+        # checkBuild() sets the build status to FULLYBUILT and
+        # removes the upload log.
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+        self.assertIs(None, build.upload_log)
+
+    def test_checkBuild_inconsistent(self):
+        # checkBuild() raises UploadError if inconsistencies between build
+        # and upload file are found.
+        build = self.factory.makeSourcePackageRecipeBuild(
+            pocket=self.policy.pocket,
+            distroseries=self.factory.makeDistroSeries(),
+            archive=self.policy.archive)
+        dsc = self.getBaseDsc()
+        uploadfile = self.createDSCFile(
+            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()))
+        self.assertRaises(UploadError, uploadfile.checkBuild, build)
+
 
 class DebBinaryUploadFileTests(PackageUploadFileTestCase):
     """Tests for DebBinaryUploadFile."""
@@ -326,3 +358,32 @@
         bpr = uploadfile.storeInDatabase(build)
         self.assertEquals(
             u"http://samba.org/~jelmer/dulwich", bpr.homepage)
+
+    def test_checkBuild(self):
+        # checkBuild() verifies consistency with a build.
+        das = self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries, architecturetag="i386")
+        build = self.factory.makeBinaryPackageBuild(
+            distroarchseries=das,
+            archive=self.policy.archive)
+        uploadfile = self.createDebBinaryUploadFile(
+            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
+            None)
+        uploadfile.checkBuild(build)
+        # checkBuild() sets the build status to FULLYBUILT and
+        # removes the upload log.
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+        self.assertIs(None, build.upload_log)
+
+    def test_checkBuild_inconsistent(self):
+        # checkBuild() raises UploadError if inconsistencies between build
+        # and upload file are found.
+        das = self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries, architecturetag="amd64")
+        build = self.factory.makeBinaryPackageBuild(
+            distroarchseries=das,
+            archive=self.policy.archive)
+        uploadfile = self.createDebBinaryUploadFile(
+            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
+            None)
+        self.assertRaises(UploadError, uploadfile.checkBuild, build)

=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py	2010-09-06 20:19:45 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py	2010-09-20 17:09:57 +0000
@@ -355,10 +355,10 @@
         builds = self.name16.archive.getBuildRecords(name="bar")
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary_universe", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # No mails are sent for successful binary uploads.
         self.assertEqual(len(stub.test_emails), 0,
@@ -405,9 +405,9 @@
 
         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -459,9 +459,9 @@
 
         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build_bar_i386.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build_bar_i386)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -760,9 +760,9 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -804,10 +804,9 @@
         # Binary uploads should exhibit the same behaviour:
         [build] = self.name16.archive.getBuildRecords(name="bar")
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_contrib_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(self.uploadprocessor, upload_dir, build=build)
         queue_items = self.breezy.getQueueItems(
             status=PackageUploadStatus.ACCEPTED, name="bar",
             version="1.0-1", exact_match=True, archive=self.name16.archive)
@@ -1306,14 +1305,14 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
 
         # Stuff 1024 MiB in name16 PPA, so anything will be above the
         # default quota limit, 1024 MiB.
         self._fillArchive(self.name16.archive, 1024 * (2 ** 20))
 
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted, and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-08-27 11:26:59 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-09-20 17:09:57 +0000
@@ -10,6 +10,9 @@
 from storm.store import Store
 from zope.component import getUtility
 
+from lp.archiveuploader.uploadprocessor import (
+    UploadStatusEnum,
+    )
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
     )
@@ -17,7 +20,6 @@
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
     )
-from lp.soyuz.enums import PackageUploadStatus
 
 
 class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
@@ -40,8 +42,7 @@
             requester=self.recipe.owner)
 
         Store.of(self.build).flush()
-        self.options.context = 'recipe'
-        self.options.buildid = self.build.id
+        self.options.context = 'buildd'
 
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
@@ -54,19 +55,14 @@
         self.assertIs(None, self.build.source_package_release)
         self.assertEqual(False, self.build.verifySuccessfulUpload())
         self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
-        self.uploadprocessor.processChangesFile(
+        result = self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
-            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
+            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
+            build=self.build)
         self.layer.txn.commit()
 
-        queue_item = self.uploadprocessor.last_processed_upload.queue_root
-        self.assertTrue(
-            queue_item is not None,
+        self.assertEquals(UploadStatusEnum.ACCEPTED, result,
             "Source upload failed\nGot: %s" % "\n".join(self.log.lines))
 
-        self.assertEqual(PackageUploadStatus.DONE, queue_item.status)
-        spr = queue_item.sources[0].sourcepackagerelease
-        self.assertEqual(self.build, spr.source_package_recipe_build)
-        self.assertEqual(spr, self.build.source_package_release)
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertEqual(True, self.build.verifySuccessfulUpload())

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-16 00:33:37 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-20 17:09:57 +0000
@@ -18,6 +18,7 @@
 import tempfile
 import traceback
 
+from storm.locals import Store
 from zope.component import (
     getGlobalSiteManager,
     getUtility,
@@ -153,7 +154,7 @@
 
         self.options = MockOptions()
         self.options.base_fsroot = self.queue_folder
-        self.options.builds = True
+        self.options.builds = False
         self.options.leafname = None
         self.options.distro = "ubuntu"
         self.options.distroseries = None
@@ -172,9 +173,13 @@
         super(TestUploadProcessorBase, self).tearDown()
 
     def getUploadProcessor(self, txn):
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
+            if self.options.builds:
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             policy.setOptions(self.options)
             return policy
         return UploadProcessor(
@@ -288,7 +293,7 @@
         shutil.copytree(upload_dir, target_path)
         return os.path.join(self.incoming_folder, queue_entry)
 
-    def processUpload(self, processor, upload_dir):
+    def processUpload(self, processor, upload_dir, build=None):
         """Process an upload queue entry directory.
 
         There is some duplication here with logic in UploadProcessor,
@@ -298,7 +303,8 @@
         results = []
         changes_files = processor.locateChangesFiles(upload_dir)
         for changes_file in changes_files:
-            result = processor.processChangesFile(upload_dir, changes_file)
+            result = processor.processChangesFile(
+                upload_dir, changes_file, build=build)
             results.append(result)
         return results
 
@@ -693,10 +699,10 @@
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         self.layer.txn.commit()
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         self.assertEqual(
             uploadprocessor.last_processed_upload.is_rejected, False)
         bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
@@ -724,10 +730,10 @@
 
         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+             build=bar_copied_build)
 
         # Make sure the upload succeeded.
         self.assertEqual(
@@ -796,9 +802,9 @@
         [bar_original_build] = bar_source_pub.createMissingBuilds()
 
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
 
         # Prepare ubuntu/breezy-autotest to build sources in i386.
@@ -818,10 +824,10 @@
         # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
         # in breezy-autotest context.
         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         self.options.distroseries = breezy_autotest.name
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)
         [duplicated_binary_upload] = breezy_autotest.getQueueItems(
             status=PackageUploadStatus.NEW, name='bar',
             version='1.0-1', exact_match=True)
@@ -859,9 +865,9 @@
         [bar_original_build] = bar_source_pub.getBuilds()
 
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-2_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
 
         # Create a COPY archive for building in non-virtual builds.
@@ -878,10 +884,10 @@
         [bar_copied_build] = bar_copied_source.createMissingBuilds()
 
         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)
 
         # The binary just uploaded is accepted because it's destined for a
         # copy archive and the PRIMARY and the COPY archives are isolated
@@ -1034,9 +1040,9 @@
             self.breezy['i386'], PackagePublishingPocket.RELEASE,
             self.ubuntu.main_archive)
         self.layer.txn.commit()
-        self.options.buildid = foocomm_build.id
         upload_dir = self.queueUpload("foocomm_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=foocomm_build)
 
         contents = [
             "Subject: foocomm_1.0-1_i386.changes rejected",
@@ -1044,10 +1050,8 @@
             "where they don't fit."]
         self.assertEmail(contents)
 
-        # Reset upload queue directory for a new upload and the
-        # uploadprocessor buildid option.
+        # Reset upload queue directory for a new upload.
         shutil.rmtree(upload_dir)
-        self.options.buildid = None
 
         # Now upload a binary package of 'foocomm', letting a new build record
         # with appropriate data be created by the uploadprocessor.
@@ -1881,7 +1885,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")
 
-    def testNoFiles(self):
+    def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.
 
@@ -1905,6 +1909,8 @@
 
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1925,7 +1931,7 @@
         self.assertTrue('DEBUG: Moving upload directory '
             in log_contents)
 
-    def testSuccess(self):
+    def testBinaryPackageBuilds(self):
         # Properly uploaded binaries should result in the
         # build status changing to FULLYBUILT.
         # Upload a source package
@@ -1946,6 +1952,8 @@
 
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         upload_dir = self.queueUpload("bar_1.0-1_binary",
@@ -1959,13 +1967,74 @@
         # No emails are sent on success
         self.assertEquals(len(stub.test_emails), last_stub_mail_count)
         self.assertEquals(BuildStatus.FULLYBUILT, build.status)
-        log_contents = build.upload_log.read()
-        log_lines = log_contents.splitlines()
-        self.assertTrue(
-            'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
-        self.assertTrue(
-            'INFO: Committing the transaction and any mails associated with '
-            'this upload.' in log_lines)
+        # Upon full build the upload log is unset.
+        self.assertIs(None, build.upload_log)
+
+    def testSourcePackageRecipeBuild(self):
+        # Properly uploaded source packages should result in the
+        # build status changing to FULLYBUILT.
+
+        # Upload a source package
+        archive = self.factory.makeArchive()
+        archive.require_virtualized = False
+        build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+            distroseries=self.breezy, archive=archive, requester=archive.owner)
+        self.assertEquals(archive.owner, build.requester)
+        bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+        # Commit so the build cookie has the right ids.
+        self.layer.txn.commit()
+        leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+        relative_path = "~%s/%s/%s/%s" % (
+            archive.owner.name, archive.name, self.breezy.distribution.name,
+            self.breezy.name)
+        upload_dir = self.queueUpload(
+            "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path)
+        self.options.context = 'buildd'
+        self.options.builds = True
+        build.jobStarted()
+        # Flush so date_started is recorded and doesn't cause constraint
+        # violations later.
+        build.status = BuildStatus.UPLOADING
+        Store.of(build).flush()
+        self.uploadprocessor.processBuildUpload(
+            self.incoming_folder, leaf_name)
+        self.layer.txn.commit()
+
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+        self.assertEquals(None, build.builder)
+        self.assertIsNot(None, build.date_finished)
+        self.assertIsNot(None, build.duration)
+        # Upon full build the upload log is unset.
+        self.assertIs(None, build.upload_log)
+
+    def testSourcePackageRecipeBuild_fail(self):
+        # A source package recipe build will fail if no files are present.
+
+        # Upload a source package
+        archive = self.factory.makeArchive()
+        archive.require_virtualized = False
+        build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+            distroseries=self.breezy, archive=archive)
+        bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+        # Flush so the build cookie has the right ids.
+        Store.of(build).flush()
+        leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+        os.mkdir(os.path.join(self.incoming_folder, leaf_name))
+        self.options.context = 'buildd'
+        self.options.builds = True
+        build.jobStarted()
+        # Flush so date_started is recorded and doesn't cause constraint
+        # violations later.
+        Store.of(build).flush()
+        build.status = BuildStatus.UPLOADING
+        self.uploadprocessor.processBuildUpload(
+            self.incoming_folder, leaf_name)
+        self.layer.txn.commit()
+        self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
+        self.assertEquals(None, build.builder)
+        self.assertIsNot(None, build.date_finished)
+        self.assertIsNot(None, build.duration)
+        self.assertIsNot(None, build.upload_log)
 
 
 class ParseBuildUploadLeafNameTests(TestCase):

=== modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt'
--- lib/lp/archiveuploader/tests/uploadpolicy.txt	2010-08-18 14:03:15 +0000
+++ lib/lp/archiveuploader/tests/uploadpolicy.txt	2010-09-20 17:09:57 +0000
@@ -53,23 +53,16 @@
   ...     distro = 'ubuntu'
   ...     distroseries = None
   >>> class MockOptions(MockAbstractOptions):
-  ...     buildid = 1
+  ...     builds = True
 
   >>> ab_opts = MockAbstractOptions()
   >>> bd_opts = MockOptions()
 
   >>> insecure_policy.setOptions(ab_opts)
-  >>> insecure_policy.options is ab_opts
-  True
   >>> insecure_policy.distro.name
   u'ubuntu'
   >>> buildd_policy.setOptions(ab_opts)
-  Traceback (most recent call last):
-  ...
-  UploadPolicyError: BuildID required for buildd context
   >>> buildd_policy.setOptions(bd_opts)
-  >>> buildd_policy.options is bd_opts
-  True
   >>> buildd_policy.distro.name
   u'ubuntu'
 

=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
--- lib/lp/archiveuploader/uploadpolicy.py	2010-08-27 10:06:26 +0000
+++ lib/lp/archiveuploader/uploadpolicy.py	2010-09-20 17:09:57 +0000
@@ -11,7 +11,6 @@
     "BuildDaemonUploadPolicy",
     "findPolicyByName",
     "IArchiveUploadPolicy",
-    "SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME",
     "UploadPolicyError",
     ]
 
@@ -34,8 +33,6 @@
 from lazr.enum import EnumeratedType, Item
 
 
-# Defined here so that uploadpolicy.py doesn't depend on lp.code.
-SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME = 'recipe'
 # Number of seconds in an hour (used later)
 HOURS = 3600
 
@@ -128,13 +125,8 @@
             raise AssertionError(
                 "Upload is not sourceful, binaryful or mixed.")
 
-    def getUploader(self, changes):
-        """Get the person who is doing the uploading."""
-        return changes.signer
-
     def setOptions(self, options):
         """Store the options for later."""
-        self.options = options
         # Extract and locate the distribution though...
         self.distro = getUtility(IDistributionSet)[options.distro]
         if options.distroseries is not None:
@@ -324,7 +316,6 @@
     """The build daemon upload policy is invoked by the slave scanner."""
 
     name = 'buildd'
-    accepted_type = ArchiveUploadType.BINARY_ONLY
 
     def __init__(self):
         super(BuildDaemonUploadPolicy, self).__init__()
@@ -333,11 +324,9 @@
         self.unsigned_dsc_ok = True
 
     def setOptions(self, options):
-        AbstractUploadPolicy.setOptions(self, options)
-        # We require a buildid to be provided
-        if (getattr(options, 'buildid', None) is None and
-            not getattr(options, 'builds', False)):
-            raise UploadPolicyError("BuildID required for buildd context")
+        """Store the options for later."""
+        super(BuildDaemonUploadPolicy, self).setOptions(options)
+        options.builds = True
 
     def policySpecificChecks(self, upload):
         """The buildd policy should enforce that the buildid matches."""
@@ -349,6 +338,15 @@
         """Buildd policy allows PPA upload."""
         return False
 
+    def validateUploadType(self, upload):
+        if upload.sourceful and upload.binaryful:
+            if self.accepted_type != ArchiveUploadType.MIXED_ONLY:
+                upload.reject(
+                    "Source/binary (i.e. mixed) uploads are not allowed.")
+        elif not upload.sourceful and not upload.binaryful:
+            raise AssertionError(
+                "Upload is not sourceful, binaryful or mixed.")
+
 
 class SyncUploadPolicy(AbstractUploadPolicy):
     """This policy is invoked when processing sync uploads."""

=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py	2010-09-07 09:23:11 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py	2010-09-20 17:09:57 +0000
@@ -71,7 +71,6 @@
     )
 from lp.archiveuploader.uploadpolicy import (
     BuildDaemonUploadPolicy,
-    SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
     UploadPolicyError,
     )
 from lp.buildmaster.enums import (
@@ -207,6 +206,7 @@
         The name of the leaf is the build id of the build.
         Build uploads always contain a single package per leaf.
         """
+        upload_path = os.path.join(fsroot, upload)
         try:
             job_id = parse_build_upload_leaf_name(upload)
         except ValueError:
@@ -220,20 +220,20 @@
                 "Unable to find package build job with id %d. Skipping." %
                 job_id)
             return
+        logger = BufferLogger()
         build = buildfarm_job.getSpecificJob()
         if build.status != BuildStatus.UPLOADING:
             self.log.warn(
-                "Expected build status to be 'UPLOADING', was %s. Skipping.",
-                build.status.name)
+                "Expected build status to be 'UPLOADING', was %s. "
+                "Moving to failed.", build.status.name)
+            self.moveProcessedUpload(upload_path, "failed", logger)
             return
         self.log.debug("Build %s found" % build.id)
-        logger = BufferLogger()
-        upload_path = os.path.join(fsroot, upload)
         try:
             [changes_file] = self.locateChangesFiles(upload_path)
             logger.debug("Considering changefile %s" % changes_file)
             result = self.processChangesFile(
-                upload_path, changes_file, logger)
+                upload_path, changes_file, logger, build)
         except (KeyboardInterrupt, SystemExit):
             raise
         except:
@@ -251,16 +251,13 @@
             UploadStatusEnum.REJECTED: "rejected",
             UploadStatusEnum.ACCEPTED: "accepted"}[result]
         self.moveProcessedUpload(upload_path, destination, logger)
+        build.date_finished = datetime.datetime.now(pytz.UTC)
         if not (result == UploadStatusEnum.ACCEPTED and
                 build.verifySuccessfulUpload() and
                 build.status == BuildStatus.FULLYBUILT):
             build.status = BuildStatus.FAILEDTOUPLOAD
-            build.date_finished = datetime.datetime.now(pytz.UTC)
             build.notify(extra_info="Uploading build %s failed." % upload)
-        build.storeUploadLog(logger.buffer.getvalue())
-
-        # Remove BuildQueue record.
-        build.buildqueue_record.destroySelf()
+            build.storeUploadLog(logger.buffer.getvalue())
 
     def processUpload(self, fsroot, upload):
         """Process an upload's changes files, and move it to a new directory.
@@ -376,7 +373,8 @@
                         os.path.join(relative_path, filename))
         return self.orderFilenames(changes_files)
 
-    def processChangesFile(self, upload_path, changes_file, logger=None):
+    def processChangesFile(self, upload_path, changes_file, logger=None,
+                           build=None):
         """Process a single changes file.
 
         This is done by obtaining the appropriate upload policy (according
@@ -432,7 +430,7 @@
                          "https://help.launchpad.net/Packaging/PPA#Uploading "
                          "and update your configuration.")))
         logger.debug("Finding fresh policy")
-        policy = self._getPolicyForDistro(distribution)
+        policy = self._getPolicyForDistro(distribution, build)
         policy.archive = archive
 
         # DistroSeries overriding respect the following precedence:
@@ -450,10 +448,8 @@
 
         # Reject source upload to buildd upload paths.
         first_path = relative_path.split(os.path.sep)[0]
-        is_not_buildd_nor_recipe_policy = policy.name not in [
-            SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
-            BuildDaemonUploadPolicy.name]
-        if first_path.isdigit() and is_not_buildd_nor_recipe_policy:
+        if (first_path.isdigit() and
+            policy.name != BuildDaemonUploadPolicy.name):
             error_message = (
                 "Invalid upload path (%s) for this policy (%s)" %
                 (relative_path, policy.name))
@@ -472,7 +468,7 @@
             result = UploadStatusEnum.ACCEPTED
 
             try:
-                upload.process()
+                upload.process(build)
             except UploadPolicyError, e:
                 upload.reject("UploadPolicyError escaped upload.process: "
                               "%s " % e)
@@ -513,7 +509,8 @@
                 upload.do_reject(notify)
                 self.ztm.abort()
             else:
-                successful = upload.do_accept(notify=notify)
+                successful = upload.do_accept(
+                    notify=notify, build=build)
                 if not successful:
                     result = UploadStatusEnum.REJECTED
                     logger.info(

=== modified file 'lib/lp/bugs/templates/bugtask-index.pt'
--- lib/lp/bugs/templates/bugtask-index.pt	2010-08-20 13:33:51 +0000
+++ lib/lp/bugs/templates/bugtask-index.pt	2010-09-20 17:09:57 +0000
@@ -20,8 +20,6 @@
         });
       </script>
       <style type="text/css">
-        /* A page-specific fix for inline text are editing to line up box. */
-        #edit-description .yui-ieditor-input { top: 0; }
         /* Align the 'add comment' link to the right of the comment box. */
         #add-comment-form textarea { width: 100%; }
         #add-comment-form { max-width: 60em; padding-bottom: 4em; }

=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
--- lib/lp/buildmaster/interfaces/packagebuild.py	2010-09-18 02:57:24 +0000
+++ lib/lp/buildmaster/interfaces/packagebuild.py	2010-09-20 17:09:57 +0000
@@ -65,10 +65,6 @@
         title=_('Build farm job'), schema=IBuildFarmJob, required=True,
         readonly=True, description=_('The base build farm job.'))
 
-    policy_name = TextLine(
-        title=_("Policy name"), required=True,
-        description=_("The upload policy to use for handling these builds."))
-
     current_component = Attribute(
         'Component where the source related to this build was last '
         'published.')
@@ -143,6 +139,14 @@
             created in a suspended state.
         """
 
+    def getUploader(changes):
+        """Return the person responsible for the upload.
+
+        This is used when checking permissions.
+
+        :param changes: Changes file from the upload.
+        """
+
 
 class IPackageBuildSource(Interface):
     """A utility of this interface used to create _things_."""

=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
--- lib/lp/buildmaster/model/packagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/buildmaster/model/packagebuild.py	2010-09-20 17:09:57 +0000
@@ -94,8 +94,6 @@
     build_farm_job_id = Int(name='build_farm_job', allow_none=False)
     build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
 
-    policy_name = 'buildd'
-
     # The following two properties are part of the IPackageBuild
     # interface, but need to be provided by derived classes.
     distribution = None
@@ -239,6 +237,10 @@
         """See `IPackageBuild`."""
         raise NotImplementedError
 
+    def getUploader(self, changes):
+        """See `IPackageBuild`."""
+        raise NotImplementedError
+
 
 class PackageBuildDerived:
     """Setup the delegation for package build.
@@ -352,6 +354,10 @@
         if not os.path.exists(target_dir):
             os.mkdir(target_dir)
 
+        # Flush so there are no race conditions with archiveuploader about
+        # self.status.
+        Store.of(self).flush()
+
         # Move the directory used to grab the binaries into
         # the incoming directory so the upload processor never
         # sees half-finished uploads.
@@ -360,6 +366,9 @@
         # Release the builder for another job.
         self.buildqueue_record.builder.cleanSlave()
 
+        # Remove BuildQueue record.
+        self.buildqueue_record.destroySelf()
+
     def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger):
         """Handle a package that had failed to build.
 

=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py	2010-09-20 17:09:57 +0000
@@ -105,7 +105,6 @@
 
     def test_default_values(self):
         # PackageBuild has a number of default values.
-        self.failUnlessEqual('buildd', self.package_build.policy_name)
         self.failUnlessEqual(
             'multiverse', self.package_build.current_component.name)
         self.failUnlessEqual(None, self.package_build.distribution)
@@ -327,6 +326,7 @@
             })
         self.assertEqual(BuildStatus.FAILEDTOUPLOAD, self.build.status)
         self.assertResultCount(0, "failed")
+        self.assertIs(None, self.build.buildqueue_record)
 
     def test_handleStatus_OK_relative_filepath(self):
         # A filemap that tries to write to files outside of

=== modified file 'lib/lp/code/configure.zcml'
--- lib/lp/code/configure.zcml	2010-09-13 04:56:29 +0000
+++ lib/lp/code/configure.zcml	2010-09-20 17:09:57 +0000
@@ -923,7 +923,7 @@
     <require permission="launchpad.View" interface="lp.code.interfaces.sourcepackagerecipebuild.ISourcePackageRecipeBuild"/>
     <!-- This is needed for UploadProcessor to run. The permission isn't
          important; launchpad.Edit isn't actually held by anybody. -->
-    <require permission="launchpad.Edit" set_attributes="status upload_log" />
+    <require permission="launchpad.Edit" set_attributes="status upload_log date_finished requester" />
   </class>
 
   <securedutility
@@ -988,10 +988,6 @@
         name="RECIPEBRANCHBUILD"
         provides="lp.buildmaster.interfaces.buildfarmjob.IBuildFarmJob"/>
 
-  <call
-    callable="lp.code.model.sourcepackagerecipebuild.register_archive_upload_policy_adapter"
-    />
-
   <webservice:register module="lp.code.interfaces.webservice" />
     <adapter
         provides="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJob"

=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
--- lib/lp/code/model/sourcepackagerecipebuild.py	2010-09-09 17:02:33 +0000
+++ lib/lp/code/model/sourcepackagerecipebuild.py	2010-09-20 17:09:57 +0000
@@ -22,7 +22,6 @@
     )
 from storm.store import Store
 from zope.component import (
-    getGlobalSiteManager,
     getUtility,
     )
 from zope.interface import (
@@ -39,12 +38,6 @@
     )
 from canonical.launchpad.webapp import errorlog
 from lp.app.errors import NotFoundError
-from lp.archiveuploader.uploadpolicy import (
-    ArchiveUploadType,
-    BuildDaemonUploadPolicy,
-    IArchiveUploadPolicy,
-    SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
-    )
 from lp.buildmaster.enums import (
     BuildFarmJobType,
     BuildStatus,
@@ -77,25 +70,10 @@
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
 
-class SourcePackageRecipeUploadPolicy(BuildDaemonUploadPolicy):
-    """Policy for uploading the results of a source package recipe build."""
-
-    name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME
-    accepted_type = ArchiveUploadType.SOURCE_ONLY
-
-    def getUploader(self, changes):
-        """Return the person doing the upload."""
-        build_id = int(getattr(self.options, 'buildid'))
-        sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-        return sprb.requester
-
-
 class SourcePackageRecipeBuild(PackageBuildDerived, Storm):
 
     __storm_table__ = 'SourcePackageRecipeBuild'
 
-    policy_name = SourcePackageRecipeUploadPolicy.name
-
     implements(ISourcePackageRecipeBuild)
     classProvides(ISourcePackageRecipeBuildSource)
 
@@ -333,6 +311,10 @@
         if self.status == BuildStatus.FULLYBUILT:
             self.notify()
 
+    def getUploader(self, changes):
+        """See `IPackageBuild`."""
+        return self.requester
+
 
 class SourcePackageRecipeBuildJob(BuildFarmJobOldDerived, Storm):
     classProvides(ISourcePackageRecipeBuildJobSource)
@@ -384,13 +366,6 @@
         return 2505 + self.build.archive.relative_build_score
 
 
-def register_archive_upload_policy_adapter():
-    getGlobalSiteManager().registerUtility(
-        component=SourcePackageRecipeUploadPolicy,
-        provided=IArchiveUploadPolicy,
-        name=SourcePackageRecipeUploadPolicy.name)
-
-
 def get_recipe_build_for_build_farm_job(build_farm_job):
     """Return the SourcePackageRecipeBuild associated with a BuildFarmJob."""
     store = Store.of(build_farm_job)

=== modified file 'lib/lp/code/model/tests/test_sourcepackagerecipebuild.py'
--- lib/lp/code/model/tests/test_sourcepackagerecipebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/code/model/tests/test_sourcepackagerecipebuild.py	2010-09-20 17:09:57 +0000
@@ -309,6 +309,12 @@
         job = sprb.build_farm_job.getSpecificJob()
         self.assertEqual(sprb, job)
 
+    def test_getUploader(self):
+        # For ACL purposes the uploader is the build requester.
+        build = self.makeSourcePackageRecipeBuild()
+        self.assertEquals(build.requester,
+            build.getUploader(None))
+
 
 class TestAsBuildmaster(TestCaseWithFactory):
 

=== modified file 'lib/lp/code/templates/branchmergeproposal-index.pt'
--- lib/lp/code/templates/branchmergeproposal-index.pt	2010-09-13 00:29:50 +0000
+++ lib/lp/code/templates/branchmergeproposal-index.pt	2010-09-20 17:09:57 +0000
@@ -18,9 +18,6 @@
     #code-review-votes {
       margin: 1em 0;
     }
-    #description, #edit-description {
-      margin: 1em 0;
-    }
     #add-comment-form {
       max-width: 60em;
       padding-bottom: 3em;
@@ -50,9 +47,6 @@
       padding-left: 20px;
       padding-bottom: 10px;
     }
-    /* A page-specific fix for inline text are editing to line up box. */
-    #edit-description .yui-ieditor-input { top: 0; }
-    #edit-commit_message .yui-ieditor-input { top: 0; }
   </style>
 </metal:block>
 

=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2010-08-04 00:16:44 +0000
+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt	2010-09-20 17:09:57 +0000
@@ -162,8 +162,7 @@
   >>> buildd_policy = getPolicy(
   ...     name='buildd',
   ...     distro=failedtoupload_candidate.distribution.name,
-  ...     distroseries=failedtoupload_candidate.distro_series.name,
-  ...     buildid=failedtoupload_candidate.id)
+  ...     distroseries=failedtoupload_candidate.distro_series.name)
 
   >>> cdrkit_bin_upload = NascentUpload.from_changesfile_path(
   ...     datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'),
@@ -171,7 +170,7 @@
   >>> cdrkit_bin_upload.process()
   >>> cdrkit_bin_upload.is_rejected
   False
-  >>> success = cdrkit_bin_upload.do_accept()
+  >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate)
   >>> print cdrkit_bin_upload.queue_root.status.name
   NEW
 

=== modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt'
--- lib/lp/soyuz/doc/buildd-slavescanner.txt	2010-09-06 20:17:11 +0000
+++ lib/lp/soyuz/doc/buildd-slavescanner.txt	2010-09-20 17:09:57 +0000
@@ -339,8 +339,6 @@
     >>> build.status.title
     'Uploading build'
 
-    >>> bqItem10.destroySelf()
-
 === Successfully collected and uploaded  (FULLYBUILT) ===
 
 Build item 6 has binary packages available in the sample data, letting us test
@@ -1062,7 +1060,6 @@
     True
     >>> print lfa.filename
     buildlog_ubuntu-hoary-i386.mozilla-firefox_0.9_BUILDING.txt.gz
-    >>> candidate.destroySelf()
 
 The attempt to fetch the buildlog from the common librarian will fail
 since this is a build in a private archive and the buildlog was thus

=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2010-08-24 15:29:01 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt	2010-09-20 17:09:57 +0000
@@ -74,15 +74,14 @@
   ...      dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive)
 
   >>> buildd_policy = getPolicy(
-  ...     name='buildd', distro='ubuntu', distroseries='dapper',
-  ...     buildid=build.id)
+  ...     name='buildd', distro='ubuntu', distroseries='dapper')
 
   >>> pmount_upload = NascentUpload.from_changesfile_path(
   ...     datadir('pmount_0.9.7-2ubuntu2_amd64.changes'),
   ...     buildd_policy, mock_logger)
   DEBUG: Changes file can be unsigned.
 
-  >>> pmount_upload.process()
+  >>> pmount_upload.process(build=build)
   DEBUG: Beginning processing.
   DEBUG: Verifying the changes file.
   DEBUG: Verifying files in upload.
@@ -105,9 +104,8 @@
   >>> print len(dapper_pmount.getLatestTranslationsUploads())
   0
 
-  >>> success = pmount_upload.do_accept()
+  >>> success = pmount_upload.do_accept(build=build)
   DEBUG: Creating queue entry
-  DEBUG: Build ... found
   ...
 
   # And all things worked.

=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2010-08-30 02:07:38 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt	2010-09-20 17:09:57 +0000
@@ -119,21 +119,17 @@
   >>> from lp.soyuz.scripts.soyuz_process_upload import (
   ...     ProcessUpload)
   >>> from canonical.testing import LaunchpadZopelessLayer
-  >>> def process_uploads(upload_policy, build_id, series, loglevel):
+  >>> def process_uploads(upload_policy, series, loglevel):
   ...     """Simulate process-upload.py script run.
   ...
   ...     :param upload_policy: context in which to consider the upload
   ...         (equivalent to script's --context option).
-  ...     :param build_id: build to which to attach this upload.
-  ...         (equivalent to script's --buildid option).
   ...     :param series: distro series to give back from.
   ...         (equivalent to script's --series option).
   ...     :param loglevel: logging level (as defined in logging module).  Any
   ...         log messages below this level will be suppressed.
   ...     """
   ...     args = [temp_dir, "-C", upload_policy]
-  ...     if build_id is not None:
-  ...         args.extend(["-b", build_id])
   ...     if series is not None:
   ...         args.extend(["-s", series])
   ...     # Run script under 'uploader' DB user.  The dbuser argument to the
@@ -230,11 +226,11 @@
   >>> from lp.services.mail import stub
 
   >>> def simulate_upload(
-  ...     leafname, is_new=False, upload_policy='anything', build_id=None,
+  ...     leafname, is_new=False, upload_policy='anything',
   ...     series=None, distro="ubuntutest", loglevel=logging.WARN):
   ...     """Process upload(s).  Options are as for process_uploads()."""
   ...     punt_upload_into_queue(leafname, distro=distro)
-  ...     process_uploads(upload_policy, build_id, series, loglevel)
+  ...     process_uploads(upload_policy, series, loglevel)
   ...     # We seem to be leaving a lock file behind here for some reason.
   ...     # Naturally it doesn't count as an unprocessed incoming file, which
   ...     # is what we're really looking for.
@@ -289,19 +285,6 @@
 
   >>> simulate_upload('bar_1.0-2')
 
-Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id.
-
-  >>> simulate_upload(
-  ...     'bar_1.0-2_binary', upload_policy="buildd", build_id="2",
-  ...     loglevel=logging.ERROR)
-  log> Exception while accepting:
-  Attempt to upload binaries specifying build 2, where they don't fit.
-  ...
-  Rejected uploads: ['bar_1.0-2_binary']
-
-Try it again without the bogus build id.  This succeeds without
-complaints.
-
   >>> simulate_upload('bar_1.0-2_binary')
 
 Check the rejection of a malicious version of bar package which refers

=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py	2010-09-20 17:09:57 +0000
@@ -760,6 +760,10 @@
         # package build, then don't hit the db.
         return self
 
+    def getUploader(self, changes):
+        """See `IBinaryPackageBuild`."""
+        return changes.signer
+
 
 class BinaryPackageBuildSet:
     implements(IBinaryPackageBuildSet)

=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
--- lib/lp/soyuz/scripts/soyuz_process_upload.py	2010-08-23 14:25:10 +0000
+++ lib/lp/soyuz/scripts/soyuz_process_upload.py	2010-09-20 17:09:57 +0000
@@ -61,11 +61,6 @@
             help="Distro series to give back from.")
 
         self.parser.add_option(
-            "-b", "--buildid", action="store", type="int", dest="buildid",
-            metavar="BUILD",
-            help="The build ID to which to attach this upload.")
-
-        self.parser.add_option(
             "-a", "--announce", action="store", dest="announcelist",
             metavar="ANNOUNCELIST", help="Override the announcement list")
 
@@ -82,10 +77,15 @@
                 "%s is not a directory" % self.options.base_fsroot)
 
         self.logger.debug("Initialising connection.")
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
             policy.setOptions(self.options)
+            if self.options.builds:
+                assert build, "--builds specified but no build"
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             return policy
         processor = UploadProcessor(self.options.base_fsroot,
             self.options.dryrun, self.options.nomails, self.options.builds,

=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py	2010-09-20 17:09:57 +0000
@@ -150,6 +150,15 @@
         self.assertStatementCount(
             0, self.build.getSpecificJob)
 
+    def test_getUploader(self):
+        # For ACL purposes the uploader is the changes file signer.
+
+        class MockChanges:
+            signer = "Somebody <somebody@xxxxxxxxxx>"
+
+        self.assertEquals("Somebody <somebody@xxxxxxxxxx>",
+            self.build.getUploader(MockChanges()))
+
 
 class TestBuildUpdateDependencies(TestCaseWithFactory):
 

=== modified file 'lib/lp/translations/stories/translations/30-rosetta-pofile-translation-gettext-error.txt'
--- lib/lp/translations/stories/translations/30-rosetta-pofile-translation-gettext-error.txt	2007-09-14 18:12:05 +0000
+++ lib/lp/translations/stories/translations/30-rosetta-pofile-translation-gettext-error.txt	2010-09-20 17:09:57 +0000
@@ -42,8 +42,7 @@
   ...     u'Found %s invalid files')
   >>> user_browser.getControl(name='submit_translations').click()
 
-We got an error, that means the url should be the same.
-We are still in the same URL, because the system detected an error.
+Because of the error, we're still on the same page.
 
   >>> print user_browser.url
   http://translations.launchpad.dev/ubuntu/hoary/+source/evolution/+pots/evolution-2.2/es/+translate?start=20
@@ -61,8 +60,8 @@
     <td>
     </td><td>
       <div>
-        format specifications in 'msgid' and 'msgstr[0]' for argument 1 are
-        not the sameformat specifications in 'msgid' and 'msgstr[1]' for
+        format specifications in ... and 'msgstr[0]' for argument 1 are
+        not the sameformat specifications in ... and 'msgstr[1]' for
         argument 1 are not the same
       </div>
     </td>