launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #02420
[Merge] lp:~abentley/launchpad/build-mail2 into lp:launchpad
Aaron Bentley has proposed merging lp:~abentley/launchpad/build-mail2 into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~abentley/launchpad/build-mail2/+merge/47563
= Summary =
Refactor upload processor to provide different behaviour for builds and user
uploads via polymorphism.
== Proposed fix ==
Extract UploadHandler, UserUploadHandler and BuildUploadHandler from
UploadProcessor
== Pre-implementation notes ==
thumper encouraged refactoring soyuz if I felt it was the right way to address
bug #681125, but did not look at the specifics.
== Implementation details ==
UploadProcessor provides the common functionality for User and Build uploads.
UserUploadHandler and BuildUploadHandler use polymorphism to provide different
behaviour for those cases where behaviour differs. This is mainly private
methods, but "process" is derived from either processBuildUpload or
processUpload.
UploadProcessor retains responsibility for processing the queue of uploads, and
hands off processing of individual uploads to the appropriate UploadHandler.
== Tests ==
bin/test -v
== Demo and Q/A ==
None; this is a refactoring.
= Launchpad lint =
Checking for conflicts and issues in changed files.
Linting changed files:
lib/lp/archiveuploader/uploadprocessor.py
lib/lp/archiveuploader/tests/test_ppauploadprocessor.py
lib/lp/archiveuploader/tests/test_recipeuploads.py
lib/lp/archiveuploader/tests/test_buildduploads.py
lib/lp/archiveuploader/tests/test_uploadprocessor.py
./lib/lp/archiveuploader/tests/test_ppauploadprocessor.py
873: E301 expected 1 blank line, found 2
./lib/lp/archiveuploader/tests/test_uploadprocessor.py
1263: E301 expected 1 blank line, found 2
--
https://code.launchpad.net/~abentley/launchpad/build-mail2/+merge/47563
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~abentley/launchpad/build-mail2 into lp:launchpad.
=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-12-21 20:11:40 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py 2011-01-26 17:46:50 +0000
@@ -18,6 +18,9 @@
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase,
)
+from lp.archiveuploader.uploadprocessor import (
+ UploadHandler,
+ )
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.soyuz.interfaces.publishing import IPublishingSet
@@ -76,6 +79,8 @@
self.options.nomails = self.no_mails
# Set up the uploadprocessor with appropriate options and logger
self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
+ self.build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
self.builds_before_upload = BinaryPackageBuild.select().count()
self.source_queue = None
self._uploadSource()
@@ -96,9 +101,10 @@
def _uploadSource(self):
"""Upload and Accept (if necessary) the base source."""
self._prepareUpload(self.source_dir)
- self.uploadprocessor.processChangesFile(
- os.path.join(self.queue_folder, "incoming", self.source_dir),
- self.source_changesfile)
+ fsroot = os.path.join(self.queue_folder, "incoming")
+ handler = UploadHandler.forProcessor(
+ self.uploadprocessor, fsroot, self.source_dir)
+ handler.processChangesFile(self.source_changesfile)
queue_item = self.uploadprocessor.last_processed_upload.queue_root
self.assertTrue(
queue_item is not None,
@@ -119,10 +125,12 @@
Return the IBuild attached to upload.
"""
self._prepareUpload(self.binary_dir)
- self.uploadprocessor.processChangesFile(
- os.path.join(self.queue_folder, "incoming", self.binary_dir),
- self.getBinaryChangesfileFor(archtag), build=build)
- queue_item = self.uploadprocessor.last_processed_upload.queue_root
+ fsroot = os.path.join(self.queue_folder, "incoming")
+ handler = UploadHandler.forProcessor(
+ self.build_uploadprocessor, fsroot, self.binary_dir, build=build)
+ handler.processChangesFile(self.getBinaryChangesfileFor(archtag))
+ last_processed = self.build_uploadprocessor.last_processed_upload
+ queue_item = last_processed.queue_root
self.assertTrue(
queue_item is not None,
"Binary Upload Failed\nGot: %s" % self.log.getLogBuffer())
@@ -240,7 +248,8 @@
self.assertEqual('FULLYBUILT', build_used.status.name)
# Force immediate publication.
- queue_item = self.uploadprocessor.last_processed_upload.queue_root
+ last_processed = self.build_uploadprocessor.last_processed_upload
+ queue_item = last_processed.queue_root
self._publishBuildQueueItem(queue_item)
# Upload powerpc binary
=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-12-21 20:29:43 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2011-01-26 17:46:50 +0000
@@ -56,6 +56,8 @@
distroseries and a new uploadprocessor instance.
"""
super(TestPPAUploadProcessorBase, self).setUp()
+ self.build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
self.ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
# Let's make 'name16' person member of 'launchpad-beta-tester'
# team only in the context of this test.
@@ -353,7 +355,7 @@
upload_dir = self.queueUpload(
"bar_1.0-1_binary_universe", "~name16/ubuntu")
self.processUpload(
- self.uploadprocessor, upload_dir, build=build)
+ self.build_uploadprocessor, upload_dir, build=build)
# No mails are sent for successful binary uploads.
self.assertEqual(len(stub.test_emails), 0,
@@ -402,7 +404,7 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
self.processUpload(
- self.uploadprocessor, upload_dir, build=build)
+ self.build_uploadprocessor, upload_dir, build=build)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -456,7 +458,7 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
self.processUpload(
- self.uploadprocessor, upload_dir, build=build_bar_i386)
+ self.build_uploadprocessor, upload_dir, build=build_bar_i386)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -724,7 +726,7 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
self.processUpload(
- self.uploadprocessor, upload_dir, build=build)
+ self.build_uploadprocessor, upload_dir, build=build)
# The binary upload was accepted and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
@@ -768,7 +770,8 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload(
"bar_1.0-1_contrib_binary", "~name16/ubuntu")
- self.processUpload(self.uploadprocessor, upload_dir, build=build)
+ self.processUpload(
+ self.build_uploadprocessor, upload_dir, build=build)
queue_items = self.breezy.getQueueItems(
status=PackageUploadStatus.ACCEPTED, name="bar",
version="1.0-1", exact_match=True, archive=self.name16.archive)
@@ -1298,7 +1301,7 @@
upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
self.processUpload(
- self.uploadprocessor, upload_dir, build=build)
+ self.build_uploadprocessor, upload_dir, build=build)
# The binary upload was accepted, and it's waiting in the queue.
queue_items = self.breezy.getQueueItems(
=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-12-21 20:29:43 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2011-01-26 17:46:50 +0000
@@ -12,6 +12,7 @@
from lp.archiveuploader.uploadprocessor import (
UploadStatusEnum,
+ UploadHandler,
)
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase,
@@ -45,7 +46,7 @@
self.options.context = 'buildd'
self.uploadprocessor = self.getUploadProcessor(
- self.layer.txn)
+ self.layer.txn, builds=True)
def testSetsBuildAndState(self):
# Ensure that the upload processor correctly links the SPR to
@@ -55,10 +56,11 @@
self.assertIs(None, self.build.source_package_release)
self.assertEqual(False, self.build.verifySuccessfulUpload())
self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
- result = self.uploadprocessor.processChangesFile(
- os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
- '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
- build=self.build)
+ fsroot = os.path.join(self.queue_folder, "incoming")
+ handler = UploadHandler.forProcessor(
+ self.uploadprocessor, fsroot, 'bar_1.0-1', self.build)
+ result = handler.processChangesFile(
+ '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
self.layer.txn.commit()
self.assertEquals(UploadStatusEnum.ACCEPTED, result,
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2011-01-13 11:41:19 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2011-01-26 17:46:50 +0000
@@ -36,7 +36,10 @@
IArchiveUploadPolicy,
)
from lp.archiveuploader.uploadprocessor import (
+ BuildUploadHandler,
+ CannotGetBuild,
parse_build_upload_leaf_name,
+ UploadHandler,
UploadProcessor,
)
from lp.buildmaster.enums import (
@@ -143,12 +146,14 @@
shutil.rmtree(self.queue_folder)
super(TestUploadProcessorBase, self).tearDown()
- def getUploadProcessor(self, txn):
+ def getUploadProcessor(self, txn, builds=None):
+ if builds is None:
+ builds = self.options.builds
def getPolicy(distro, build):
self.options.distro = distro.name
policy = findPolicyByName(self.options.context)
- if self.options.builds:
+ if builds:
policy.distroseries = build.distro_series
policy.pocket = build.pocket
policy.archive = build.archive
@@ -157,8 +162,8 @@
return UploadProcessor(
self.options.base_fsroot, self.options.dryrun,
- self.options.nomails, self.options.builds,
- self.options.keep, getPolicy, txn, self.log)
+ self.options.nomails, builds, self.options.keep, getPolicy, txn,
+ self.log)
def publishPackage(self, packagename, version, source=True,
archive=None):
@@ -275,10 +280,12 @@
so that we can debug failing tests effectively.
"""
results = []
- changes_files = processor.locateChangesFiles(upload_dir)
+ self.assertEqual(processor.builds, build is not None)
+ handler = UploadHandler.forProcessor(
+ processor, '.', upload_dir, build)
+ changes_files = handler.locateChangesFiles()
for changes_file in changes_files:
- result = processor.processChangesFile(
- upload_dir, changes_file, build=build)
+ result = handler.processChangesFile(changes_file)
results.append(result)
return results
@@ -402,25 +409,6 @@
finally:
shutil.rmtree(testdir)
- def testLocateChangesFiles(self):
- """locateChangesFiles should return the .changes files in a folder.
-
- 'source' changesfiles come first. Files that are not named as
- changesfiles are ignored.
- """
- testdir = tempfile.mkdtemp()
- try:
- open("%s/1.changes" % testdir, "w").close()
- open("%s/2_source.changes" % testdir, "w").close()
- open("%s/3.not_changes" % testdir, "w").close()
-
- up = self.getUploadProcessor(None)
- located_files = up.locateChangesFiles(testdir)
- self.assertEqual(
- located_files, ["2_source.changes", "1.changes"])
- finally:
- shutil.rmtree(testdir)
-
def _makeUpload(self, testdir):
"""Create a dummy upload for testing the move/remove methods."""
upload = tempfile.mkdtemp(dir=testdir)
@@ -443,7 +431,8 @@
# Move it
self.options.base_fsroot = testdir
up = self.getUploadProcessor(None)
- up.moveUpload(upload, target_name, self.log)
+ handler = UploadHandler(up, '.', upload)
+ handler.moveUpload(target_name, self.log)
# Check it moved
self.assertTrue(os.path.exists(os.path.join(target, upload_name)))
@@ -464,7 +453,8 @@
# Remove it
self.options.base_fsroot = testdir
up = self.getUploadProcessor(None)
- up.moveProcessedUpload(upload, "accepted", self.log)
+ handler = UploadHandler(up, '.', upload)
+ handler.moveProcessedUpload("accepted", self.log)
# Check it was removed, not moved
self.assertFalse(os.path.exists(os.path.join(
@@ -487,7 +477,8 @@
# Move it
self.options.base_fsroot = testdir
up = self.getUploadProcessor(None)
- up.moveProcessedUpload(upload, "rejected", self.log)
+ handler = UploadHandler(up, '.', upload)
+ handler.moveProcessedUpload("rejected", self.log)
# Check it moved
self.assertTrue(os.path.exists(os.path.join(testdir,
@@ -510,7 +501,8 @@
# Remove it
self.options.base_fsroot = testdir
up = self.getUploadProcessor(None)
- up.removeUpload(upload, self.log)
+ handler = UploadHandler(up, '.', upload)
+ handler.removeUpload(self.log)
# Check it was removed, not moved
self.assertFalse(os.path.exists(os.path.join(
@@ -520,13 +512,6 @@
finally:
shutil.rmtree(testdir)
- def testOrderFilenames(self):
- """orderFilenames sorts _source.changes ahead of other files."""
- up = self.getUploadProcessor(None)
-
- self.assertEqual(["d_source.changes", "a", "b", "c"],
- up.orderFilenames(["b", "a", "d_source.changes", "c"]))
-
def testRejectionEmailForUnhandledException(self):
"""Test there's a rejection email when nascentupload breaks.
@@ -675,7 +660,9 @@
self.options.context = 'buildd'
self.layer.txn.commit()
upload_dir = self.queueUpload("bar_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir,
+ build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
+ self.processUpload(build_uploadprocessor, upload_dir,
build=bar_original_build)
self.assertEqual(
uploadprocessor.last_processed_upload.is_rejected, False)
@@ -706,12 +693,12 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload(
"bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
- self.processUpload(uploadprocessor, upload_dir,
+ self.processUpload(build_uploadprocessor, upload_dir,
build=bar_copied_build)
# Make sure the upload succeeded.
self.assertEqual(
- uploadprocessor.last_processed_upload.is_rejected, False)
+ build_uploadprocessor.last_processed_upload.is_rejected, False)
# The upload should also be auto-accepted even though there's no
# ancestry. This means items should go to ACCEPTED and not NEW.
@@ -777,8 +764,10 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload("bar_1.0-1_binary")
+ build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
self.processUpload(
- uploadprocessor, upload_dir, build=bar_original_build)
+ build_uploadprocessor, upload_dir, build=bar_original_build)
[bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
# Prepare ubuntu/breezy-autotest to build sources in i386.
@@ -800,7 +789,7 @@
shutil.rmtree(upload_dir)
self.options.distroseries = breezy_autotest.name
upload_dir = self.queueUpload("bar_1.0-1_binary")
- self.processUpload(uploadprocessor, upload_dir,
+ self.processUpload(build_uploadprocessor, upload_dir,
build=bar_copied_build)
[duplicated_binary_upload] = breezy_autotest.getQueueItems(
status=PackageUploadStatus.NEW, name='bar',
@@ -840,7 +829,9 @@
self.options.context = 'buildd'
upload_dir = self.queueUpload("bar_1.0-2_binary")
- self.processUpload(uploadprocessor, upload_dir,
+ build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
+ self.processUpload(build_uploadprocessor, upload_dir,
build=bar_original_build)
[bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
@@ -860,14 +851,14 @@
shutil.rmtree(upload_dir)
upload_dir = self.queueUpload(
"bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
- self.processUpload(uploadprocessor, upload_dir,
+ self.processUpload(build_uploadprocessor, upload_dir,
build=bar_copied_build)
# The binary just uploaded is accepted because it's destined for a
# copy archive and the PRIMARY and the COPY archives are isolated
# from each other.
self.assertEqual(
- uploadprocessor.last_processed_upload.is_rejected, False)
+ build_uploadprocessor.last_processed_upload.is_rejected, False)
def testPartnerArchiveMissingForPartnerUploadFails(self):
"""A missing partner archive should produce a rejection email.
@@ -1015,8 +1006,10 @@
self.ubuntu.main_archive)
self.layer.txn.commit()
upload_dir = self.queueUpload("foocomm_1.0-1_binary")
+ build_uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True)
self.processUpload(
- uploadprocessor, upload_dir, build=foocomm_build)
+ build_uploadprocessor, upload_dir, build=foocomm_build)
contents = [
"Subject: foocomm_1.0-1_i386.changes rejected",
@@ -1183,9 +1176,9 @@
fp = StringIO()
error_report.write(fp)
error_text = fp.getvalue()
- self.assertTrue(
+ self.assertIn(
"Unable to find mandatory field 'Files' "
- "in the changes file" in error_text)
+ "in the changes file", error_text)
# Housekeeping so the next test won't fail.
shutil.rmtree(upload_dir)
@@ -1844,22 +1837,34 @@
super(TestBuildUploadProcessor, self).setUp()
self.uploadprocessor = self.setupBreezyAndGetUploadProcessor()
+
+class TestUploadHandler(TestUploadProcessorBase):
+
+ def setUp(self):
+ super(TestUploadHandler, self).setUp()
+ self.uploadprocessor = self.setupBreezyAndGetUploadProcessor()
+
def testInvalidLeafName(self):
# Directories with invalid leaf names should be skipped,
# and a warning logged.
upload_dir = self.queueUpload("bar_1.0-1", queue_entry="bar")
- self.uploadprocessor.processBuildUpload(upload_dir, "bar")
- self.assertLogContains('Unable to extract build id from leaf '
- 'name bar, skipping.')
+ e = self.assertRaises(
+ CannotGetBuild, BuildUploadHandler, self.uploadprocessor,
+ upload_dir, "bar")
+ self.assertIn(
+ 'Unable to extract build id from leaf name bar, skipping.',
+ str(e))
def testNoBuildEntry(self):
# Directories that refer to a nonexistent build
# should be skipped and a warning logged.
cookie = "%s-%d" % (BuildFarmJobType.PACKAGEBUILD.name, 42)
upload_dir = self.queueUpload("bar_1.0-1", queue_entry=cookie)
- self.uploadprocessor.processBuildUpload(upload_dir, cookie)
- self.assertLogContains(
- "Unable to find package build job with id 42. Skipping.")
+ e = self.assertRaises(
+ CannotGetBuild,
+ BuildUploadHandler, self.uploadprocessor, upload_dir, cookie)
+ self.assertIn(
+ "Unable to find package build job with id 42. Skipping.", str(e))
def testBinaryPackageBuild_fail(self):
# If the upload directory is empty, the upload
@@ -1893,8 +1898,8 @@
os.mkdir(os.path.join(self.incoming_folder, leaf_name))
self.options.context = 'buildd'
self.options.builds = True
- self.uploadprocessor.processBuildUpload(
- self.incoming_folder, leaf_name)
+ BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
+ leaf_name).process()
self.assertEquals(1, len(self.oopses))
self.assertEquals(
BuildStatus.FAILEDTOUPLOAD, build.status)
@@ -1936,8 +1941,8 @@
self.options.context = 'buildd'
self.options.builds = True
last_stub_mail_count = len(stub.test_emails)
- self.uploadprocessor.processBuildUpload(
- self.incoming_folder, leaf_name)
+ BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
+ leaf_name).process()
self.layer.txn.commit()
# No emails are sent on success
self.assertEquals(len(stub.test_emails), last_stub_mail_count)
@@ -1973,8 +1978,8 @@
build.status = BuildStatus.UPLOADING
build.date_finished = UTC_NOW
Store.of(build).flush()
- self.uploadprocessor.processBuildUpload(
- self.incoming_folder, leaf_name)
+ BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
+ leaf_name).process()
self.layer.txn.commit()
self.assertEquals(BuildStatus.FULLYBUILT, build.status)
@@ -2004,8 +2009,8 @@
Store.of(build).flush()
build.status = BuildStatus.UPLOADING
build.date_finished = UTC_NOW
- self.uploadprocessor.processBuildUpload(
- self.incoming_folder, leaf_name)
+ BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
+ leaf_name).process()
self.layer.txn.commit()
self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
self.assertEquals(None, build.builder)
@@ -2039,8 +2044,8 @@
self.options.context = 'buildd'
self.options.builds = True
last_stub_mail_count = len(stub.test_emails)
- self.uploadprocessor.processBuildUpload(
- self.incoming_folder, leaf_name)
+ BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
+ leaf_name).process()
self.layer.txn.commit()
# The build status is not changed
self.assertTrue(
@@ -2050,6 +2055,31 @@
"Expected build status to be 'UPLOADING', was BUILDING. "
"Ignoring.")
+ def testOrderFilenames(self):
+ """orderFilenames sorts _source.changes ahead of other files."""
+ self.assertEqual(["d_source.changes", "a", "b", "c"],
+ UploadHandler.orderFilenames(["b", "a", "d_source.changes", "c"]))
+
+ def testLocateChangesFiles(self):
+ """locateChangesFiles should return the .changes files in a folder.
+
+ 'source' changesfiles come first. Files that are not named as
+ changesfiles are ignored.
+ """
+ testdir = tempfile.mkdtemp()
+ try:
+ open("%s/1.changes" % testdir, "w").close()
+ open("%s/2_source.changes" % testdir, "w").close()
+ open("%s/3.not_changes" % testdir, "w").close()
+
+ up = self.getUploadProcessor(None)
+ handler = UploadHandler(up, '.', testdir)
+ located_files = handler.locateChangesFiles()
+ self.assertEqual(
+ located_files, ["2_source.changes", "1.changes"])
+ finally:
+ shutil.rmtree(testdir)
+
class ParseBuildUploadLeafNameTests(TestCase):
"""Tests for parse_build_upload_leaf_name."""
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2011-01-13 11:42:44 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2011-01-26 17:46:50 +0000
@@ -47,14 +47,12 @@
__metaclass__ = type
-import datetime
import os
import shutil
import stat
import sys
from contrib.glock import GlobalLock
-import pytz
from sqlobject import SQLObjectNotFound
from zope.component import getUtility
@@ -190,127 +188,16 @@
self.log.debug("Skipping %s -- does not match %s" % (
upload, leaf_name))
continue
- if self.builds:
- # Upload directories contain build results,
- # directories are named after job ids.
- self.processBuildUpload(fsroot, upload)
+ try:
+ handler = UploadHandler.forProcessor(self, fsroot, upload)
+ except CannotGetBuild, e:
+ self.log.warn(e)
else:
- self.processUpload(fsroot, upload)
+ handler.process()
finally:
self.log.debug("Rolling back any remaining transactions.")
self.ztm.abort()
- def processBuildUpload(self, fsroot, upload):
- """Process an upload that is the result of a build.
-
- The name of the leaf is the build id of the build.
- Build uploads always contain a single package per leaf.
- """
- upload_path = os.path.join(fsroot, upload)
- try:
- job_id = parse_build_upload_leaf_name(upload)
- except ValueError:
- self.log.warn("Unable to extract build id from leaf name %s,"
- " skipping." % upload)
- return
- try:
- buildfarm_job = getUtility(IBuildFarmJobSet).getByID(job_id)
- except NotFoundError:
- self.log.warn(
- "Unable to find package build job with id %d. Skipping." %
- job_id)
- return
- logger = BufferLogger()
- build = buildfarm_job.getSpecificJob()
- if build.status != BuildStatus.UPLOADING:
- self.log.warn(
- "Expected build status to be 'UPLOADING', was %s. Ignoring." %
- build.status.name)
- return
- self.log.debug("Build %s found" % build.id)
- try:
- [changes_file] = self.locateChangesFiles(upload_path)
- logger.debug("Considering changefile %s" % changes_file)
- result = self.processChangesFile(
- upload_path, changes_file, logger, build)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- info = sys.exc_info()
- message = (
- 'Exception while processing upload %s' % upload_path)
- properties = [('error-explanation', message)]
- request = ScriptRequest(properties)
- error_utility = ErrorReportingUtility()
- error_utility.raising(info, request)
- logger.error('%s (%s)' % (message, request.oopsid))
- result = UploadStatusEnum.FAILED
- destination = {
- UploadStatusEnum.FAILED: "failed",
- UploadStatusEnum.REJECTED: "rejected",
- UploadStatusEnum.ACCEPTED: "accepted"}[result]
- if not (result == UploadStatusEnum.ACCEPTED and
- build.verifySuccessfulUpload() and
- build.status == BuildStatus.FULLYBUILT):
- build.status = BuildStatus.FAILEDTOUPLOAD
- build.notify(extra_info="Uploading build %s failed." % upload)
- build.storeUploadLog(logger.getLogBuffer())
- self.ztm.commit()
- self.moveProcessedUpload(upload_path, destination, logger)
-
- def processUpload(self, fsroot, upload):
- """Process an upload's changes files, and move it to a new directory.
-
- The destination directory depends on the result of the processing
- of the changes files. If there are no changes files, the result
- is 'failed', otherwise it is the worst of the results from the
- individual changes files, in order 'failed', 'rejected', 'accepted'.
-
- """
- upload_path = os.path.join(fsroot, upload)
- changes_files = self.locateChangesFiles(upload_path)
-
- # Keep track of the various results
- some_failed = False
- some_rejected = False
- some_accepted = False
-
- for changes_file in changes_files:
- self.log.debug("Considering changefile %s" % changes_file)
- try:
- result = self.processChangesFile(
- upload_path, changes_file, self.log)
- if result == UploadStatusEnum.FAILED:
- some_failed = True
- elif result == UploadStatusEnum.REJECTED:
- some_rejected = True
- else:
- some_accepted = True
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- info = sys.exc_info()
- message = (
- 'Exception while processing upload %s' % upload_path)
- properties = [('error-explanation', message)]
- request = ScriptRequest(properties)
- error_utility = ErrorReportingUtility()
- error_utility.raising(info, request)
- self.log.error('%s (%s)' % (message, request.oopsid))
- some_failed = True
-
- if some_failed:
- destination = "failed"
- elif some_rejected:
- destination = "rejected"
- elif some_accepted:
- destination = "accepted"
- else:
- # There were no changes files at all. We consider
- # the upload to be failed in this case.
- destination = "failed"
- self.moveProcessedUpload(upload_path, destination, self.log)
-
def locateDirectories(self, fsroot):
"""Return a list of upload directories in a given queue.
@@ -354,8 +241,43 @@
return sorted_dir_names
- def locateChangesFiles(self, upload_path):
- """Locate .changes files in the given upload directory.
+
+class UploadHandler:
+ """Handler for processing a single upload."""
+
+ def __init__(self, processor, fsroot, upload):
+ """Constructor.
+
+ :param processor: The `UploadProcessor` that requested processing the
+ upload.
+ :param fsroot: Path to the directory containing the upload directory
+ :param upload: Name of the directory containing the upload.
+ """
+ self.processor = processor
+ self.fsroot = fsroot
+ self.upload = upload
+ self.upload_path = os.path.join(self.fsroot, self.upload)
+
+ @staticmethod
+ def forProcessor(processor, fsroot, upload, build=None):
+ """Instantiate an UploadHandler subclass for a given upload.
+
+ :param processor: The `UploadProcessor` that requested processing the
+ upload.
+ :param fsroot: Path to the directory containing the upload directory
+ :param upload: Name of the directory containing the upload.
+ :param build: Optional; the build that produced the upload.
+ """
+ if processor.builds:
+ # Upload directories contain build results,
+ # directories are named after job ids.
+ return BuildUploadHandler(processor, fsroot, upload, build)
+ else:
+ assert build is None
+ return UserUploadHandler(processor, fsroot, upload)
+
+ def locateChangesFiles(self):
+ """Locate .changes files in the upload directory.
Return .changes files sorted with *_source.changes first. This
is important to us, as in an upload containing several changes files,
@@ -364,16 +286,15 @@
"""
changes_files = []
- for dirpath, dirnames, filenames in os.walk(upload_path):
- relative_path = dirpath[len(upload_path) + 1:]
+ for dirpath, dirnames, filenames in os.walk(self.upload_path):
+ relative_path = dirpath[len(self.upload_path) + 1:]
for filename in filenames:
if filename.endswith(".changes"):
changes_files.append(
os.path.join(relative_path, filename))
return self.orderFilenames(changes_files)
- def processChangesFile(self, upload_path, changes_file, logger=None,
- build=None):
+ def processChangesFile(self, changes_file, logger=None):
"""Process a single changes file.
This is done by obtaining the appropriate upload policy (according
@@ -389,9 +310,13 @@
Returns a value from UploadStatusEnum, or re-raises an exception
from NascentUpload.
+
+ :param changes_file: filename of the changes file to process.
+ :param logger: logger to use for processing.
+ :return: an `UploadStatusEnum` value
"""
if logger is None:
- logger = self.log
+ logger = self.processor.log
# Calculate the distribution from the path within the upload
# Reject the upload since we could not process the path,
# Store the exception information as a rejection message.
@@ -429,7 +354,7 @@
"https://help.launchpad.net/Packaging/PPA#Uploading "
"and update your configuration.")))
logger.debug("Finding fresh policy")
- policy = self._getPolicyForDistro(distribution, build)
+ policy = self._getPolicyForDistro(distribution)
policy.archive = archive
# DistroSeries overriding respect the following precedence:
@@ -441,9 +366,9 @@
# The path we want for NascentUpload is the path to the folder
# containing the changes file (and the other files referenced by it).
- changesfile_path = os.path.join(upload_path, changes_file)
+ changesfile_path = os.path.join(self.upload_path, changes_file)
upload = NascentUpload.from_changesfile_path(
- changesfile_path, policy, self.log)
+ changesfile_path, policy, self.processor.log)
# Reject source upload to buildd upload paths.
first_path = relative_path.split(os.path.sep)[0]
@@ -460,14 +385,14 @@
upload.reject(upload_path_error)
# Store processed NascentUpload instance, mostly used for tests.
- self.last_processed_upload = upload
+ self.processor.last_processed_upload = upload
try:
logger.info("Processing upload %s" % upload.changes.filename)
result = UploadStatusEnum.ACCEPTED
try:
- upload.process(build)
+ self._processUpload(upload)
except UploadPolicyError, e:
upload.reject("UploadPolicyError escaped upload.process: "
"%s " % e)
@@ -501,103 +426,251 @@
# when transaction is committed) this will cause any emails sent
# sent by do_reject to be lost.
notify = True
- if self.dry_run or self.no_mails:
+ if self.processor.dry_run or self.processor.no_mails:
notify = False
if upload.is_rejected:
result = UploadStatusEnum.REJECTED
upload.do_reject(notify)
- self.ztm.abort()
+ self.processor.ztm.abort()
else:
- successful = upload.do_accept(
- notify=notify, build=build)
+ successful = self._acceptUpload(upload, notify)
if not successful:
result = UploadStatusEnum.REJECTED
logger.info(
"Rejection during accept. Aborting partial accept.")
- self.ztm.abort()
+ self.processor.ztm.abort()
if upload.is_rejected:
logger.info("Upload was rejected:")
for msg in upload.rejections:
logger.info("\t%s" % msg)
- if self.dry_run:
+ if self.processor.dry_run:
logger.info("Dry run, aborting transaction.")
- self.ztm.abort()
+ self.processor.ztm.abort()
else:
logger.info(
"Committing the transaction and any mails associated "
"with this upload.")
- self.ztm.commit()
+ self.processor.ztm.commit()
except:
- self.ztm.abort()
+ self.processor.ztm.abort()
raise
return result
- def removeUpload(self, upload, logger):
+ def removeUpload(self, logger):
"""Remove an upload that has succesfully been processed.
This includes moving the given upload directory and moving the
matching .distro file, if it exists.
+
+ :param logger: The logger to use for logging results.
"""
- if self.keep or self.dry_run:
+ if self.processor.keep or self.processor.dry_run:
logger.debug("Keeping contents untouched")
return
- logger.debug("Removing upload directory %s", upload)
- shutil.rmtree(upload)
+ logger.debug("Removing upload directory %s", self.upload_path)
+ shutil.rmtree(self.upload_path)
- distro_filename = upload + ".distro"
+ distro_filename = self.upload_path + ".distro"
if os.path.isfile(distro_filename):
logger.debug("Removing distro file %s", distro_filename)
os.remove(distro_filename)
- def moveProcessedUpload(self, upload_path, destination, logger):
+ def moveProcessedUpload(self, destination, logger):
"""Move or remove the upload depending on the status of the upload.
+
+ :param destination: An `UploadStatusEnum` value.
+ :param logger: The logger to use for logging results.
"""
if destination == "accepted":
- self.removeUpload(upload_path, logger)
+ self.removeUpload(logger)
else:
- self.moveUpload(upload_path, destination, logger)
+ self.moveUpload(destination, logger)
- def moveUpload(self, upload, subdir_name, logger):
+ def moveUpload(self, subdir_name, logger):
"""Move the upload to the named subdir of the root, eg 'accepted'.
This includes moving the given upload directory and moving the
matching .distro file, if it exists.
+
+ :param subdir_name: Name of the subdirectory to move to.
+ :param logger: The logger to use for logging results.
"""
- if self.keep or self.dry_run:
+ if self.processor.keep or self.processor.dry_run:
logger.debug("Keeping contents untouched")
return
- pathname = os.path.basename(upload)
+ pathname = os.path.basename(self.upload_path)
target_path = os.path.join(
- self.base_fsroot, subdir_name, pathname)
+ self.processor.base_fsroot, subdir_name, pathname)
logger.debug("Moving upload directory %s to %s" %
- (upload, target_path))
- shutil.move(upload, target_path)
+ (self.upload_path, target_path))
+ shutil.move(self.upload_path, target_path)
- distro_filename = upload + ".distro"
+ distro_filename = self.upload_path + ".distro"
if os.path.isfile(distro_filename):
- target_path = os.path.join(self.base_fsroot, subdir_name,
+ target_path = os.path.join(self.processor.base_fsroot,
+ subdir_name,
os.path.basename(distro_filename))
logger.debug("Moving distro file %s to %s" % (distro_filename,
target_path))
shutil.move(distro_filename, target_path)
- def orderFilenames(self, fnames):
+ @staticmethod
+ def orderFilenames(fnames):
"""Order filenames, sorting *_source.changes before others.
Aside from that, a standard string sort.
+
+ :param fnames: An iterable of changes-file names.
+ :return: A new sorted list; `fnames` is not modified.
"""
+
+ # Sort key: False (source changes) orders before True, then the
+ # filename itself breaks ties lexicographically.
def sourceFirst(filename):
return (not filename.endswith("_source.changes"), filename)
return sorted(fnames, key=sourceFirst)
+class UserUploadHandler(UploadHandler):
+
+    """Handler for uploads submitted directly by users."""
+
+ def process(self):
+ """Process an upload's changes files, and move it to a new directory.
+
+ The destination directory depends on the result of the processing
+ of the changes files. If there are no changes files, the result
+ is 'failed', otherwise it is the worst of the results from the
+ individual changes files, in order 'failed', 'rejected', 'accepted'.
+ """
+ changes_files = self.locateChangesFiles()
+
+ # Statuses of all changes files processed in this upload.
+ results = set()
+
+ for changes_file in changes_files:
+ self.processor.log.debug(
+ "Considering changefile %s" % changes_file)
+ try:
+ results.add(self.processChangesFile(
+ changes_file, self.processor.log))
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ # Deliberate catch-all (after re-raising shutdown
+ # exceptions above): one broken changes file must not
+ # wedge the whole upload queue.  An OOPS is filed and the
+ # upload is marked failed.
+ info = sys.exc_info()
+ message = (
+ 'Exception while processing upload %s' % self.upload_path)
+ properties = [('error-explanation', message)]
+ request = ScriptRequest(properties)
+ error_utility = ErrorReportingUtility()
+ error_utility.raising(info, request)
+ self.processor.log.error(
+ '%s (%s)' % (message, request.oopsid))
+ results.add(UploadStatusEnum.FAILED)
+ if len(results) == 0:
+ destination = UploadStatusEnum.FAILED
+ else:
+ # Pick the worst status present, in severity order
+ # failed > rejected > accepted.
+ for destination in [
+ UploadStatusEnum.FAILED, UploadStatusEnum.REJECTED,
+ UploadStatusEnum.ACCEPTED]:
+ if destination in results:
+ break
+ self.moveProcessedUpload(destination, self.processor.log)
+
+ def _getPolicyForDistro(self, distribution):
+ # User uploads have no originating build (hence build=None).
+ return self.processor._getPolicyForDistro(distribution, None)
+
+ def _processUpload(self, upload):
+ upload.process(None)
+
+ def _acceptUpload(self, upload, notify):
+ return upload.do_accept(notify=notify, build=None)
+
+
+class CannotGetBuild(Exception):
+
+ """Attempting to retrieve the build for this upload failed.
+
+ Raised when the upload's leaf name does not encode a job id, or
+ when no build farm job with that id can be found.
+ """
+
+class BuildUploadHandler(UploadHandler):
+
+ """Handler for uploads that are the result of a build-farm build."""
+
+ def __init__(self, processor, fsroot, upload, build=None):
+ """Constructor.
+
+ See `UploadHandler`.
+ :param build: Optional build that produced this upload. If not
+ supplied, will be retrieved using the id in the upload.
+ :raises CannotGetBuild: If the build could not be retrieved.
+ """
+ super(BuildUploadHandler, self).__init__(processor, fsroot, upload)
+ self.build = build
+ if self.build is None:
+ self.build = self._getBuild()
+
+ def _getPolicyForDistro(self, distribution):
+ # Unlike user uploads, the policy is tailored to this build.
+ return self.processor._getPolicyForDistro(distribution, self.build)
+
+ def _processUpload(self, upload):
+ upload.process(self.build)
+
+ def _acceptUpload(self, upload, notify):
+ return upload.do_accept(notify=notify, build=self.build)
+
+ def _getBuild(self):
+ """Retrieve the build for this upload from the upload's leaf name.
+
+ :raises CannotGetBuild: If the leaf name does not encode a job
+ id, or no build farm job exists with that id.
+ :return: The specific job for the upload's build farm job.
+ """
+ try:
+ job_id = parse_build_upload_leaf_name(self.upload)
+ except ValueError:
+ raise CannotGetBuild(
+ "Unable to extract build id from leaf name %s, skipping." %
+ self.upload)
+ try:
+ buildfarm_job = getUtility(IBuildFarmJobSet).getByID(job_id)
+ except NotFoundError:
+ # Fixed: the original had an unreachable "return" after this
+ # raise; it has been removed as dead code.
+ raise CannotGetBuild(
+ "Unable to find package build job with id %d. Skipping." %
+ job_id)
+ return buildfarm_job.getSpecificJob()
+
+ def process(self):
+ """Process an upload that is the result of a build.
+
+ The name of the leaf is the build id of the build.
+ Build uploads always contain a single package per leaf.
+ """
+ logger = BufferLogger()
+ if self.build.status != BuildStatus.UPLOADING:
+ self.processor.log.warn(
+ "Expected build status to be 'UPLOADING', was %s. Ignoring." %
+ self.build.status.name)
+ return
+ self.processor.log.debug("Build %s found" % self.build.id)
+ try:
+ [changes_file] = self.locateChangesFiles()
+ logger.debug("Considering changefile %s" % changes_file)
+ result = self.processChangesFile(changes_file, logger)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ # Deliberate catch-all (after re-raising shutdown exceptions
+ # above): a broken upload must not wedge the queue. An OOPS
+ # is filed and the upload is marked failed.
+ info = sys.exc_info()
+ message = (
+ 'Exception while processing upload %s' % self.upload_path)
+ properties = [('error-explanation', message)]
+ request = ScriptRequest(properties)
+ error_utility = ErrorReportingUtility()
+ error_utility.raising(info, request)
+ logger.error('%s (%s)' % (message, request.oopsid))
+ result = UploadStatusEnum.FAILED
+ # The upload only counts as a success if it was accepted AND the
+ # build's files verify AND the build finished fully; anything
+ # less marks the build failed-to-upload and notifies.
+ if not (result == UploadStatusEnum.ACCEPTED and
+ self.build.verifySuccessfulUpload() and
+ self.build.status == BuildStatus.FULLYBUILT):
+ self.build.status = BuildStatus.FAILEDTOUPLOAD
+ self.build.notify(extra_info="Uploading build %s failed." %
+ self.upload)
+ # The captured log is stored on the build either way.
+ self.build.storeUploadLog(logger.getLogBuffer())
+ self.processor.ztm.commit()
+ self.moveProcessedUpload(result, logger)
+
def _getDistributionAndSuite(parts, exc_type):
"""Return an `IDistribution` and a valid suite name for the given path.
@@ -733,5 +806,3 @@
% (archive.displayname, archive.distribution.name))
return (distribution, suite_name, archive)
-
-