launchpad-reviewers team mailing list archive, Message #00472
[Merge] lp:~jelmer/launchpad/refactor-uploadprocessor into lp:launchpad/devel
Jelmer Vernooij has proposed merging lp:~jelmer/launchpad/refactor-uploadprocessor into lp:launchpad/devel.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers): code
This branch contains some refactoring required in preparation for adding support for handling build upload queues in process-upload:
* Add a convenience method for obtaining an UploadProcessor in the UploadProcessor tests
* Move _publishPackage to the base class for the UploadProcessor tests
* Use individual parameters for UploadProcessor rather than an Options object, so that instances can be created outside the process-upload script without having to mock the options object (a usage sketch follows below)
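
For context, a minimal sketch of how the refactored constructor is wired up with individual parameters plus a policy callback, mirroring the getPolicy closure used in the branch. The options, txn and log objects stand in for whatever the calling code already has, and make_upload_processor is a hypothetical helper name for illustration, not part of the branch:

from lp.archiveuploader.uploadpolicy import findPolicyByOptions
from lp.archiveuploader.uploadprocessor import UploadProcessor

def make_upload_processor(options, txn, log):
    # Hypothetical helper: the policy is now obtained through a callback
    # rather than derived from an options object inside UploadProcessor.
    def get_policy(distro):
        options.distro = distro.name
        return findPolicyByOptions(options)
    return UploadProcessor(
        options.base_fsroot, options.dryrun, options.nomails,
        options.keep, get_policy, txn, log)

# e.g. in a script:
#   processor = make_upload_processor(options, txn, log)
#   processor.processUploadQueue(options.leafname)
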
--
https://code.launchpad.net/~jelmer/launchpad/refactor-uploadprocessor/+merge/31966
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jelmer/launchpad/refactor-uploadprocessor into lp:launchpad/devel.
=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
--- lib/lp/archiveuploader/tests/__init__.py 2010-05-04 15:38:08 +0000
+++ lib/lp/archiveuploader/tests/__init__.py 2010-08-06 14:31:06 +0000
@@ -1,6 +1,10 @@
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
+"""Tests for the archive uploader."""
+
+from __future__ import with_statement
+
__metaclass__ = type
__all__ = ['datadir', 'getPolicy', 'insertFakeChangesFile',
@@ -25,6 +29,7 @@
raise ValueError("Path is not relative: %s" % path)
return os.path.join(here, 'data', path)
+
def insertFakeChangesFile(fileID, path=None):
"""Insert a fake changes file into the librarian.
@@ -34,11 +39,11 @@
"""
if path is None:
path = datadir("ed-0.2-21/ed_0.2-21_source.changes")
- changes_file_obj = open(path, 'r')
- test_changes_file = changes_file_obj.read()
- changes_file_obj.close()
+ with open(path, 'r') as changes_file_obj:
+ test_changes_file = changes_file_obj.read()
fillLibrarianFile(fileID, content=test_changes_file)
+
def insertFakeChangesFileForAllPackageUploads():
"""Ensure all the PackageUpload records point to a valid changes file."""
for id in set(pu.changesfile.id for pu in PackageUploadSet()):
@@ -53,6 +58,7 @@
self.distroseries = distroseries
self.buildid = buildid
+
def getPolicy(name='anything', distro='ubuntu', distroseries=None,
buildid=None):
"""Build and return an Upload Policy for the given context."""
=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-07-18 00:26:33 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py 2010-08-06 14:31:06 +0000
@@ -7,7 +7,6 @@
from lp.archiveuploader.tests.test_securityuploads import (
TestStagedBinaryUploadBase)
-from lp.archiveuploader.uploadprocessor import UploadProcessor
from lp.registry.interfaces.pocket import PackagePublishingPocket
from canonical.database.constants import UTC_NOW
from canonical.launchpad.interfaces import PackagePublishingStatus
@@ -84,8 +83,8 @@
"""Setup an UploadProcessor instance for a given buildd context."""
self.options.context = self.policy
self.options.buildid = str(build_candidate.id)
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(
+ self.layer.txn)
def testDelayedBinaryUpload(self):
"""Check if Soyuz copes with delayed binary uploads.
=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-02 02:13:52 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-06 14:31:06 +0000
@@ -18,7 +18,6 @@
from zope.security.proxy import removeSecurityProxy
from lp.app.errors import NotFoundError
-from lp.archiveuploader.uploadprocessor import UploadProcessor
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase)
from canonical.config import config
@@ -74,8 +73,7 @@
# Set up the uploadprocessor with appropriate options and logger
self.options.context = 'insecure'
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
def assertEmail(self, contents=None, recipients=None,
ppa_header='name16'):
@@ -1224,8 +1222,7 @@
# Re-initialize uploadprocessor since it depends on the new
# transaction reset by switchDbUser.
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
def testPPASizeQuotaSourceRejection(self):
"""Verify the size quota check for PPA uploads.
=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-07-18 00:26:33 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-08-06 14:31:06 +0000
@@ -12,7 +12,6 @@
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase)
-from lp.archiveuploader.uploadprocessor import UploadProcessor
from lp.buildmaster.interfaces.buildbase import BuildStatus
from lp.code.interfaces.sourcepackagerecipebuild import (
ISourcePackageRecipeBuildSource)
@@ -42,8 +41,8 @@
self.options.context = 'recipe'
self.options.buildid = self.build.id
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(
+ self.layer.txn)
def testSetsBuildAndState(self):
# Ensure that the upload processor correctly links the SPR to
=== modified file 'lib/lp/archiveuploader/tests/test_securityuploads.py'
--- lib/lp/archiveuploader/tests/test_securityuploads.py 2010-07-18 00:26:33 +0000
+++ lib/lp/archiveuploader/tests/test_securityuploads.py 2010-08-06 14:31:06 +0000
@@ -11,7 +11,6 @@
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase)
-from lp.archiveuploader.uploadprocessor import UploadProcessor
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
from lp.soyuz.model.processor import ProcessorFamily
@@ -70,8 +69,7 @@
self.options.context = self.policy
self.options.nomails = self.no_mails
# Set up the uploadprocessor with appropriate options and logger
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
self.builds_before_upload = BinaryPackageBuild.select().count()
self.source_queue = None
self._uploadSource()
@@ -232,8 +230,7 @@
"""
build_candidate = self._createBuild('i386')
self.options.buildid = str(build_candidate.id)
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
build_used = self._uploadBinary('i386')
@@ -254,8 +251,7 @@
"""
build_candidate = self._createBuild('hppa')
self.options.buildid = str(build_candidate.id)
- self.uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
self.assertRaises(AssertionError, self._uploadBinary, 'i386')
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-02 02:13:52 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-06 14:31:06 +0000
@@ -23,8 +23,10 @@
from zope.security.proxy import removeSecurityProxy
from lp.app.errors import NotFoundError
-from lp.archiveuploader.uploadpolicy import AbstractUploadPolicy
+from lp.archiveuploader.uploadpolicy import (AbstractUploadPolicy,
+ findPolicyByOptions)
from lp.archiveuploader.uploadprocessor import UploadProcessor
+from lp.buildmaster.interfaces.buildbase import BuildStatus
from canonical.config import config
from canonical.database.constants import UTC_NOW
from lp.soyuz.model.archivepermission import ArchivePermission
@@ -59,7 +61,7 @@
ISourcePackageNameSet)
from lp.services.mail import stub
from canonical.launchpad.testing.fakepackager import FakePackager
-from lp.testing import TestCaseWithFactory
+from lp.testing import TestCase, TestCaseWithFactory
from lp.testing.mail_helpers import pop_notifications
from canonical.launchpad.webapp.errorlog import ErrorReportingUtility
from canonical.testing import LaunchpadZopelessLayer
@@ -113,7 +115,8 @@
super(TestUploadProcessorBase, self).setUp()
self.queue_folder = tempfile.mkdtemp()
- os.makedirs(os.path.join(self.queue_folder, "incoming"))
+ self.incoming_folder = os.path.join(self.queue_folder, "incoming")
+ os.makedirs(self.incoming_folder)
self.test_files_dir = os.path.join(config.root,
"lib/lp/archiveuploader/tests/data/suite")
@@ -139,6 +142,30 @@
shutil.rmtree(self.queue_folder)
super(TestUploadProcessorBase, self).tearDown()
+ def getUploadProcessor(self, txn):
+ def getPolicy(distro):
+ self.options.distro = distro.name
+ return findPolicyByOptions(self.options)
+ return UploadProcessor(
+ self.options.base_fsroot, self.options.dryrun,
+ self.options.nomails,
+ self.options.keep, getPolicy, txn, self.log)
+
+ def publishPackage(self, packagename, version, source=True,
+ archive=None):
+ """Publish a single package that is currently NEW in the queue."""
+ queue_items = self.breezy.getQueueItems(
+ status=PackageUploadStatus.NEW, name=packagename,
+ version=version, exact_match=True, archive=archive)
+ self.assertEqual(queue_items.count(), 1)
+ queue_item = queue_items[0]
+ queue_item.setAccepted()
+ if source:
+ pubrec = queue_item.sources[0].publish(self.log)
+ else:
+ pubrec = queue_item.builds[0].publish(self.log)
+ return pubrec
+
def assertLogContains(self, line):
"""Assert if a given line is present in the log messages."""
self.assertTrue(line in self.log.lines,
@@ -208,25 +235,29 @@
filename, len(content), StringIO(content),
'application/x-gtar')
- def queueUpload(self, upload_name, relative_path="", test_files_dir=None):
+ def queueUpload(self, upload_name, relative_path="", test_files_dir=None,
+ queue_entry=None):
"""Queue one of our test uploads.
- upload_name is the name of the test upload directory. It is also
+ upload_name is the name of the test upload directory. If there
+ is no explicit queue entry name specified, it is also
the name of the queue entry directory we create.
relative_path is the path to create inside the upload, eg
ubuntu/~malcc/default. If not specified, defaults to "".
Return the path to the upload queue entry directory created.
"""
+ if queue_entry is None:
+ queue_entry = upload_name
target_path = os.path.join(
- self.queue_folder, "incoming", upload_name, relative_path)
+ self.incoming_folder, queue_entry, relative_path)
if test_files_dir is None:
test_files_dir = self.test_files_dir
upload_dir = os.path.join(test_files_dir, upload_name)
if relative_path:
os.makedirs(os.path.dirname(target_path))
shutil.copytree(upload_dir, target_path)
- return os.path.join(self.queue_folder, "incoming", upload_name)
+ return os.path.join(self.incoming_folder, queue_entry)
def processUpload(self, processor, upload_dir):
"""Process an upload queue entry directory.
@@ -248,8 +279,7 @@
self.layer.txn.commit()
if policy is not None:
self.options.context = policy
- return UploadProcessor(
- self.options, self.layer.txn, self.log)
+ return self.getUploadProcessor(self.layer.txn)
def assertEmail(self, contents=None, recipients=None):
"""Check last email content and recipients.
@@ -341,24 +371,9 @@
"Expected acceptance email not rejection. Actually Got:\n%s"
% raw_msg)
- def _publishPackage(self, packagename, version, source=True,
- archive=None):
- """Publish a single package that is currently NEW in the queue."""
- queue_items = self.breezy.getQueueItems(
- status=PackageUploadStatus.NEW, name=packagename,
- version=version, exact_match=True, archive=archive)
- self.assertEqual(queue_items.count(), 1)
- queue_item = queue_items[0]
- queue_item.setAccepted()
- if source:
- pubrec = queue_item.sources[0].publish(self.log)
- else:
- pubrec = queue_item.builds[0].publish(self.log)
- return pubrec
-
def testInstantiate(self):
"""UploadProcessor should instantiate"""
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
def testLocateDirectories(self):
"""Return a sorted list of subdirs in a directory.
@@ -372,7 +387,7 @@
os.mkdir("%s/dir1" % testdir)
os.mkdir("%s/dir2" % testdir)
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
located_dirs = up.locateDirectories(testdir)
self.assertEqual(located_dirs, ['dir1', 'dir2', 'dir3'])
finally:
@@ -390,7 +405,7 @@
open("%s/2_source.changes" % testdir, "w").close()
open("%s/3.not_changes" % testdir, "w").close()
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
located_files = up.locateChangesFiles(testdir)
self.assertEqual(
located_files, ["2_source.changes", "1.changes"])
@@ -418,7 +433,7 @@
# Move it
self.options.base_fsroot = testdir
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
up.moveUpload(upload, target_name)
# Check it moved
@@ -439,7 +454,7 @@
# Remove it
self.options.base_fsroot = testdir
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
up.moveProcessedUpload(upload, "accepted")
# Check it was removed, not moved
@@ -462,7 +477,7 @@
# Move it
self.options.base_fsroot = testdir
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
up.moveProcessedUpload(upload, "rejected")
# Check it moved
@@ -485,7 +500,7 @@
# Remove it
self.options.base_fsroot = testdir
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
up.removeUpload(upload)
# Check it was removed, not moved
@@ -498,7 +513,7 @@
def testOrderFilenames(self):
"""orderFilenames sorts _source.changes ahead of other files."""
- up = UploadProcessor(self.options, None, self.log)
+ up = self.getUploadProcessor(None)
self.assertEqual(["d_source.changes", "a", "b", "c"],
up.orderFilenames(["b", "a", "d_source.changes", "c"]))
@@ -522,8 +537,7 @@
# Register our broken upload policy
AbstractUploadPolicy._registerPolicy(BrokenUploadPolicy)
self.options.context = 'broken'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload a package to Breezy.
upload_dir = self.queueUpload("baz_1.0-1")
@@ -634,7 +648,7 @@
# Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
- bar_source_pub = self._publishPackage('bar', '1.0-1')
+ bar_source_pub = self.publishPackage('bar', '1.0-1')
[bar_original_build] = bar_source_pub.createMissingBuilds()
# Move the source from the accepted queue.
@@ -653,7 +667,7 @@
self.processUpload(uploadprocessor, upload_dir)
self.assertEqual(
uploadprocessor.last_processed_upload.is_rejected, False)
- bar_bin_pubs = self._publishPackage('bar', '1.0-1', source=False)
+ bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
# Mangle its publishing component to "restricted" so we can check
# the copy archive ancestry override later.
restricted = getUtility(IComponentSet)["restricted"]
@@ -746,14 +760,14 @@
# Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
- bar_source_pub = self._publishPackage('bar', '1.0-1')
+ bar_source_pub = self.publishPackage('bar', '1.0-1')
[bar_original_build] = bar_source_pub.createMissingBuilds()
self.options.context = 'buildd'
self.options.buildid = bar_original_build.id
upload_dir = self.queueUpload("bar_1.0-1_binary")
self.processUpload(uploadprocessor, upload_dir)
- [bar_binary_pub] = self._publishPackage("bar", "1.0-1", source=False)
+ [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
# Prepare ubuntu/breezy-autotest to build sources in i386.
breezy_autotest = self.ubuntu['breezy-autotest']
@@ -803,7 +817,7 @@
# Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
- bar_source_old = self._publishPackage('bar', '1.0-1')
+ bar_source_old = self.publishPackage('bar', '1.0-1')
# Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
upload_dir = self.queueUpload("bar_1.0-2")
@@ -816,7 +830,7 @@
self.options.buildid = bar_original_build.id
upload_dir = self.queueUpload("bar_1.0-2_binary")
self.processUpload(uploadprocessor, upload_dir)
- [bar_binary_pub] = self._publishPackage("bar", "1.0-2", source=False)
+ [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
# Create a COPY archive for building in non-virtual builds.
uploader = getUtility(IPersonSet).getByName('name16')
@@ -971,7 +985,7 @@
partner_archive = getUtility(IArchiveSet).getByDistroPurpose(
self.ubuntu, ArchivePurpose.PARTNER)
self.assertTrue(partner_archive)
- self._publishPackage("foocomm", "1.0-1", archive=partner_archive)
+ self.publishPackage("foocomm", "1.0-1", archive=partner_archive)
# Check the publishing record's archive and component.
foocomm_spph = SourcePackagePublishingHistory.selectOneBy(
@@ -1015,7 +1029,7 @@
self.assertEqual(foocomm_bpr.component.name, 'partner')
# Publish the upload so we can check the publishing record.
- self._publishPackage("foocomm", "1.0-1", source=False)
+ self.publishPackage("foocomm", "1.0-1", source=False)
# Check the publishing record's archive and component.
foocomm_bpph = BinaryPackagePublishingHistory.selectOneBy(
@@ -1054,14 +1068,14 @@
# Accept and publish the upload.
partner_archive = getUtility(IArchiveSet).getByDistroPurpose(
self.ubuntu, ArchivePurpose.PARTNER)
- self._publishPackage("foocomm", "1.0-1", archive=partner_archive)
+ self.publishPackage("foocomm", "1.0-1", archive=partner_archive)
# Now do the same thing with a binary package.
upload_dir = self.queueUpload("foocomm_1.0-1_binary")
self.processUpload(uploadprocessor, upload_dir)
# Accept and publish the upload.
- self._publishPackage("foocomm", "1.0-1", source=False,
+ self.publishPackage("foocomm", "1.0-1", source=False,
archive=partner_archive)
# Upload the next source version of the package.
@@ -1105,8 +1119,7 @@
self.breezy.status = SeriesStatus.CURRENT
self.layer.txn.commit()
self.options.context = 'insecure'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload a package for Breezy.
upload_dir = self.queueUpload("foocomm_1.0-1_proposed")
@@ -1124,8 +1137,7 @@
self.breezy.status = SeriesStatus.CURRENT
self.layer.txn.commit()
self.options.context = 'insecure'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload a package for Breezy.
upload_dir = self.queueUpload("foocomm_1.0-1")
@@ -1140,8 +1152,7 @@
pocket and ensure it fails."""
# Set up the uploadprocessor with appropriate options and logger.
self.options.context = 'insecure'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload a package for Breezy.
upload_dir = self.queueUpload("foocomm_1.0-1_updates")
@@ -1302,8 +1313,7 @@
used.
That exception will then initiate the creation of an OOPS report.
"""
- processor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ processor = self.getUploadProcessor(self.layer.txn)
upload_dir = self.queueUpload("foocomm_1.0-1_proposed")
bogus_changesfile_data = '''
@@ -1346,8 +1356,7 @@
self.setupBreezy()
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the source first to enable the binary later:
upload_dir = self.queueUpload("bar_1.0-1_lzma")
@@ -1357,7 +1366,7 @@
self.assertTrue(
"rejected" not in raw_msg,
"Failed to upload bar source:\n%s" % raw_msg)
- self._publishPackage("bar", "1.0-1")
+ self.publishPackage("bar", "1.0-1")
# Clear out emails generated during upload.
ignore = pop_notifications()
@@ -1456,15 +1465,14 @@
permission=ArchivePermissionType.UPLOAD, person=uploader,
component=restricted)
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the first version and accept it to make it known in
# Ubuntu. The uploader has rights to upload NEW packages to
# components that he does not have direct rights to.
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
- bar_source_pub = self._publishPackage('bar', '1.0-1')
+ bar_source_pub = self.publishPackage('bar', '1.0-1')
# Clear out emails generated during upload.
ignore = pop_notifications()
@@ -1509,15 +1517,14 @@
permission=ArchivePermissionType.UPLOAD, person=uploader,
component=restricted)
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the first version and accept it to make it known in
# Ubuntu. The uploader has rights to upload NEW packages to
# components that he does not have direct rights to.
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
- bar_source_pub = self._publishPackage('bar', '1.0-1')
+ bar_source_pub = self.publishPackage('bar', '1.0-1')
# Clear out emails generated during upload.
ignore = pop_notifications()
@@ -1590,8 +1597,7 @@
# with pointer to the Soyuz questions in Launchpad and the
# reason why the message was sent to the current recipients.
self.setupBreezy()
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
upload_dir = self.queueUpload("bar_1.0-1", "boing")
self.processUpload(uploadprocessor, upload_dir)
@@ -1636,8 +1642,7 @@
self.setupBreezy()
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the source.
upload_dir = self.queueUpload("bar_1.0-1_3.0-quilt")
@@ -1655,8 +1660,7 @@
permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT])
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the source.
upload_dir = self.queueUpload("bar_1.0-1_3.0-quilt")
@@ -1666,7 +1670,7 @@
self.assertTrue(
"rejected" not in raw_msg,
"Failed to upload bar source:\n%s" % raw_msg)
- spph = self._publishPackage("bar", "1.0-1")
+ spph = self.publishPackage("bar", "1.0-1")
self.assertEquals(
sorted((sprf.libraryfile.filename, sprf.filetype)
@@ -1689,8 +1693,7 @@
permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT])
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the first source.
upload_dir = self.queueUpload("bar_1.0-1_3.0-quilt")
@@ -1700,7 +1703,7 @@
self.assertTrue(
"rejected" not in raw_msg,
"Failed to upload bar source:\n%s" % raw_msg)
- spph = self._publishPackage("bar", "1.0-1")
+ spph = self.publishPackage("bar", "1.0-1")
# Upload another source sharing the same (component) orig.
upload_dir = self.queueUpload("bar_1.0-2_3.0-quilt_without_orig")
@@ -1728,8 +1731,7 @@
permitted_formats=[SourcePackageFormat.FORMAT_3_0_NATIVE])
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the source.
upload_dir = self.queueUpload("bar_1.0_3.0-native")
@@ -1739,7 +1741,7 @@
self.assertTrue(
"rejected" not in raw_msg,
"Failed to upload bar source:\n%s" % raw_msg)
- spph = self._publishPackage("bar", "1.0")
+ spph = self.publishPackage("bar", "1.0")
self.assertEquals(
sorted((sprf.libraryfile.filename, sprf.filetype)
@@ -1754,8 +1756,7 @@
self.setupBreezy()
self.layer.txn.commit()
self.options.context = 'absolutely-anything'
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
# Upload the source.
upload_dir = self.queueUpload("bar_1.0-1_1.0-bzip2")
@@ -1772,8 +1773,7 @@
self.setupBreezy()
breezy = self.ubuntu['breezy']
breezy.status = SeriesStatus.CURRENT
- uploadprocessor = UploadProcessor(
- self.options, self.layer.txn, self.log)
+ uploadprocessor = self.getUploadProcessor(self.layer.txn)
upload_dir = self.queueUpload("bar_1.0-1")
self.processUpload(uploadprocessor, upload_dir)
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2010-08-02 09:40:22 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2010-08-06 14:31:06 +0000
@@ -60,7 +60,7 @@
from lp.archiveuploader.nascentupload import (
NascentUpload, FatalUploadError, EarlyReturnUploadError)
from lp.archiveuploader.uploadpolicy import (
- findPolicyByOptions, UploadPolicyError)
+ UploadPolicyError)
from lp.soyuz.interfaces.archive import IArchiveSet, NoSuchPPA
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.person import IPersonSet
@@ -108,16 +108,33 @@
class UploadProcessor:
"""Responsible for processing uploads. See module docstring."""
- def __init__(self, options, ztm, log):
- self.options = options
+ def __init__(self, base_fsroot, dry_run, no_mails, keep, policy_for_distro,
+ ztm, log):
+ """Create a new upload processor.
+
+ :param base_fsroot: Root path for queue to use
+ :param dry_run: Run but don't commit changes to database
+ :param no_mails: Don't send out any emails
+ :param builds: Interpret leaf names as build ids
+ :param keep: Leave the files in place, don't move them away
+ :param policy_for_distro: callback to obtain Policy object for a
+ distribution
+ :param ztm: Database transaction to use
+ :param log: Logger to use for reporting
+ """
+ self.base_fsroot = base_fsroot
+ self.dry_run = dry_run
+ self.keep = keep
+ self.last_processed_upload = None
+ self.log = log
+ self.no_mails = no_mails
+ self._getPolicyForDistro = policy_for_distro
self.ztm = ztm
- self.log = log
- self.last_processed_upload = None
- def processUploadQueue(self):
+ def processUploadQueue(self, leaf_name=None):
"""Search for uploads, and process them.
- Uploads are searched for in the 'incoming' directory inside the
+ Uploads are searched for in the 'incoming' directory inside the
base_fsroot.
This method also creates the 'incoming', 'accepted', 'rejected', and
@@ -127,19 +144,22 @@
self.log.debug("Beginning processing")
for subdir in ["incoming", "accepted", "rejected", "failed"]:
- full_subdir = os.path.join(self.options.base_fsroot, subdir)
+ full_subdir = os.path.join(self.base_fsroot, subdir)
if not os.path.exists(full_subdir):
self.log.debug("Creating directory %s" % full_subdir)
os.mkdir(full_subdir)
- fsroot = os.path.join(self.options.base_fsroot, "incoming")
+ fsroot = os.path.join(self.base_fsroot, "incoming")
uploads_to_process = self.locateDirectories(fsroot)
self.log.debug("Checked in %s, found %s"
% (fsroot, uploads_to_process))
for upload in uploads_to_process:
self.log.debug("Considering upload %s" % upload)
+ if leaf_name is not None and upload != leaf_name:
+ self.log.debug("Skipping %s -- does not match %s" % (
+ upload, leaf_name))
+ continue
self.processUpload(fsroot, upload)
-
finally:
self.log.debug("Rolling back any remaining transactions.")
self.ztm.abort()
@@ -152,16 +172,7 @@
is 'failed', otherwise it is the worst of the results from the
individual changes files, in order 'failed', 'rejected', 'accepted'.
- If the leafname option is set but its value is not the same as the
- name of the upload directory, skip it entirely.
-
"""
- if (self.options.leafname is not None and
- upload != self.options.leafname):
- self.log.debug("Skipping %s -- does not match %s" % (
- upload, self.options.leafname))
- return
-
upload_path = os.path.join(fsroot, upload)
changes_files = self.locateChangesFiles(upload_path)
@@ -242,7 +253,7 @@
# Skip lockfile deletion, see similar code in lp.poppy.hooks.
fsroot_lock.release(skip_delete=True)
- sorted_dir_names = sorted(
+ sorted_dir_names = sorted(
dir_name
for dir_name in dir_names
if os.path.isdir(os.path.join(fsroot, dir_name)))
@@ -321,8 +332,7 @@
"https://help.launchpad.net/Packaging/PPA#Uploading "
"and update your configuration.")))
self.log.debug("Finding fresh policy")
- self.options.distro = distribution.name
- policy = findPolicyByOptions(self.options)
+ policy = self._getPolicyForDistro(distribution)
policy.archive = archive
# DistroSeries overriding respect the following precedence:
@@ -395,7 +405,7 @@
# when transaction is committed) this will cause any emails sent
# sent by do_reject to be lost.
notify = True
- if self.options.dryrun or self.options.nomails:
+ if self.dry_run or self.no_mails:
notify = False
if upload.is_rejected:
result = UploadStatusEnum.REJECTED
@@ -414,7 +424,7 @@
for msg in upload.rejections:
self.log.warn("\t%s" % msg)
- if self.options.dryrun:
+ if self.dry_run:
self.log.info("Dry run, aborting transaction.")
self.ztm.abort()
else:
@@ -433,7 +443,7 @@
This includes moving the given upload directory and moving the
matching .distro file, if it exists.
"""
- if self.options.keep or self.options.dryrun:
+ if self.keep or self.dry_run:
self.log.debug("Keeping contents untouched")
return
@@ -461,21 +471,21 @@
This includes moving the given upload directory and moving the
matching .distro file, if it exists.
"""
- if self.options.keep or self.options.dryrun:
+ if self.keep or self.dry_run:
self.log.debug("Keeping contents untouched")
return
pathname = os.path.basename(upload)
target_path = os.path.join(
- self.options.base_fsroot, subdir_name, pathname)
+ self.base_fsroot, subdir_name, pathname)
self.log.debug("Moving upload directory %s to %s" %
(upload, target_path))
shutil.move(upload, target_path)
distro_filename = upload + ".distro"
if os.path.isfile(distro_filename):
- target_path = os.path.join(self.options.base_fsroot, subdir_name,
+ target_path = os.path.join(self.base_fsroot, subdir_name,
os.path.basename(distro_filename))
self.log.debug("Moving distro file %s to %s" % (distro_filename,
target_path))
=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
--- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-05-04 15:38:08 +0000
+++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-06 14:31:06 +0000
@@ -8,6 +8,7 @@
import os
+from lp.archiveuploader.uploadpolicy import findPolicyByOptions
from lp.archiveuploader.uploadprocessor import UploadProcessor
from lp.services.scripts.base import (
LaunchpadCronScript, LaunchpadScriptFailure)
@@ -74,8 +75,13 @@
"%s is not a directory" % self.options.base_fsroot)
self.logger.debug("Initialising connection.")
- UploadProcessor(
- self.options, self.txn, self.logger).processUploadQueue()
+ def getPolicy(distro):
+ self.options.distro = distro.name
+ return findPolicyByOptions(self.options)
+ processor = UploadProcessor(self.options.base_fsroot,
+ self.options.dryrun, self.options.nomails, self.options.keep,
+ getPolicy, self.txn, self.logger)
+ processor.processUploadQueue(self.options.leafname)
@property
def lockfilename(self):