← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~abentley/launchpad/archivejob into lp:launchpad

 

Aaron Bentley has proposed merging lp:~abentley/launchpad/archivejob into lp:launchpad.

Requested reviews:
  Deryck Hodge (deryck)

For more details, see:
https://code.launchpad.net/~abentley/launchpad/archivejob/+merge/103331

= Summary =
Remove unused CopyArchiveJob

== Pre-implementation notes ==
Discussed with Julian

== LOC Rationale ==
Reduces LOC count.

== Implementation details ==
CopyArchiveJob turns out to be dead code.  It is the only kind of ArchiveJob, so ArchiveJob is also dead code, as is ArchiveJobType, the enum used to specify ArchiveJob.  This branch removes them and their associated tests.

== Tests ==
None

== Demo and Q/A ==
None


= Launchpad lint =

Checking for conflicts and issues in changed files.

Linting changed files:
  lib/lp/soyuz/enums.py
-- 
https://code.launchpad.net/~abentley/launchpad/archivejob/+merge/103331
Your team Launchpad code reviewers is subscribed to branch lp:launchpad.
=== modified file 'lib/lp/soyuz/enums.py'
--- lib/lp/soyuz/enums.py	2011-12-19 23:38:16 +0000
+++ lib/lp/soyuz/enums.py	2012-04-24 17:31:12 +0000
@@ -5,7 +5,6 @@
 
 __metaclass__ = type
 __all__ = [
-    'ArchiveJobType',
     'ArchivePermissionType',
     'ArchivePurpose',
     'ArchiveStatus',
@@ -40,17 +39,6 @@
 re_bug_numbers = re.compile(r"\#?\s?(\d+)")
 
 
-class ArchiveJobType(DBEnumeratedType):
-    """Values that IArchiveJob.job_type can take."""
-
-    COPY_ARCHIVE = DBItem(0, """
-        Create a copy archive.
-
-        This job creates a copy archive from the current state of
-        the archive.
-        """)
-
-
 class ArchivePermissionType(DBEnumeratedType):
     """Archive Permission Type.
 

=== removed file 'lib/lp/soyuz/interfaces/archivejob.py'
--- lib/lp/soyuz/interfaces/archivejob.py	2011-12-24 16:54:44 +0000
+++ lib/lp/soyuz/interfaces/archivejob.py	1970-01-01 00:00:00 +0000
@@ -1,54 +0,0 @@
-# Copyright 2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Int,
-    Object,
-    )
-
-from lp import _
-from lp.services.job.interfaces.job import (
-    IJob,
-    IJobSource,
-    IRunnableJob,
-    )
-from lp.soyuz.interfaces.archive import IArchive
-
-
-class IArchiveJob(Interface):
-    """A Job related to an Archive."""
-
-    id = Int(
-        title=_('DB ID'), required=True, readonly=True,
-        description=_("The tracking number for this job."))
-
-    archive = Object(
-        title=_('The archive this job is about.'), schema=IArchive,
-        required=True)
-
-    job = Object(
-        title=_('The common Job attributes'), schema=IJob, required=True)
-
-    metadata = Attribute('A dict of data about the job.')
-
-    def destroySelf():
-        """Destroy this object."""
-
-
-class IArchiveJobSource(IJobSource):
-    """An interface for acquiring IArchiveJobs."""
-
-    def create(archive):
-        """Create a new IArchiveJobs for an archive."""
-
-
-class ICopyArchiveJob(IRunnableJob):
-    """A Job to copy archives."""
-
-
-class ICopyArchiveJobSource(IArchiveJobSource):
-    """Interface for acquiring CopyArchiveJobs."""

=== removed file 'lib/lp/soyuz/model/archivejob.py'
--- lib/lp/soyuz/model/archivejob.py	2012-03-04 09:53:04 +0000
+++ lib/lp/soyuz/model/archivejob.py	1970-01-01 00:00:00 +0000
@@ -1,142 +0,0 @@
-# Copyright 2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-__metaclass__ = object
-
-from lazr.delegates import delegates
-import simplejson
-from sqlobject import SQLObjectNotFound
-from storm.expr import And
-from storm.locals import (
-    Int,
-    Reference,
-    Unicode,
-    )
-from zope.component import getUtility
-from zope.interface import (
-    classProvides,
-    implements,
-    )
-
-from lp.services.database.enumcol import EnumCol
-from lp.services.database.stormbase import StormBase
-from lp.services.job.model.job import Job
-from lp.services.job.runner import BaseRunnableJob
-from lp.services.webapp.interfaces import (
-    DEFAULT_FLAVOR,
-    IStoreSelector,
-    MAIN_STORE,
-    MASTER_FLAVOR,
-    )
-from lp.soyuz.enums import ArchiveJobType
-from lp.soyuz.interfaces.archivejob import (
-    IArchiveJob,
-    IArchiveJobSource,
-    )
-from lp.soyuz.model.archive import Archive
-
-
-class ArchiveJob(StormBase):
-    """Base class for jobs related to Archives."""
-
-    implements(IArchiveJob)
-
-    __storm_table__ = 'archivejob'
-
-    id = Int(primary=True)
-
-    job_id = Int(name='job')
-    job = Reference(job_id, Job.id)
-
-    archive_id = Int(name='archive')
-    archive = Reference(archive_id, Archive.id)
-
-    job_type = EnumCol(enum=ArchiveJobType, notNull=True)
-
-    _json_data = Unicode('json_data')
-
-    @property
-    def metadata(self):
-        return simplejson.loads(self._json_data)
-
-    def __init__(self, archive, job_type, metadata):
-        """Create an ArchiveJob.
-
-        :param archive: the archive this job relates to.
-        :param job_type: the bugjobtype of this job.
-        :param metadata: the type-specific variables, as a json-compatible
-            dict.
-        """
-        super(ArchiveJob, self).__init__()
-        json_data = simplejson.dumps(metadata)
-        self.job = Job()
-        self.archive = archive
-        self.job_type = job_type
-        # XXX AaronBentley 2009-01-29 bug=322819: This should be a bytestring,
-        # but the db representation is unicode.
-        self._json_data = json_data.decode('utf-8')
-
-    @classmethod
-    def get(cls, key):
-        """Return the instance of this class whose key is supplied."""
-        store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
-        instance = store.get(cls, key)
-        if instance is None:
-            raise SQLObjectNotFound(
-                'No occurence of %s has key %s' % (cls.__name__, key))
-        return instance
-
-
-class ArchiveJobDerived(BaseRunnableJob):
-    """Intermediate class for deriving from ArchiveJob."""
-    delegates(IArchiveJob)
-    classProvides(IArchiveJobSource)
-
-    def __init__(self, job):
-        self.context = job
-
-    @classmethod
-    def create(cls, archive, metadata=None):
-        """See `IArchiveJob`."""
-        if metadata is None:
-            metadata = {}
-        job = ArchiveJob(archive, cls.class_job_type, metadata)
-        return cls(job)
-
-    @classmethod
-    def get(cls, job_id):
-        """Get a job by id.
-
-        :return: the ArchiveJob with the specified id, as the current
-                 ArchiveJobDerived subclass.
-        :raises: SQLObjectNotFound if there is no job with the specified id,
-                 or its job_type does not match the desired subclass.
-        """
-        job = ArchiveJob.get(job_id)
-        if job.job_type != cls.class_job_type:
-            raise SQLObjectNotFound(
-                'No object found with id %d and type %s' % (job_id,
-                cls.class_job_type.title))
-        return cls(job)
-
-    @classmethod
-    def iterReady(cls):
-        """Iterate through all ready ArchiveJobs."""
-        store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
-        jobs = store.find(
-            ArchiveJob,
-            And(ArchiveJob.job_type == cls.class_job_type,
-                ArchiveJob.job == Job.id,
-                Job.id.is_in(Job.ready_jobs),
-                ArchiveJob.archive == Archive.id))
-        return (cls(job) for job in jobs)
-
-    def getOopsVars(self):
-        """See `IRunnableJob`."""
-        vars = BaseRunnableJob.getOopsVars(self)
-        vars.extend([
-            ('archive_id', self.context.archive.id),
-            ('archive_job_id', self.context.id),
-            ('archive_job_type', self.context.job_type.title),
-            ])
-        return vars

=== removed file 'lib/lp/soyuz/model/copyarchivejob.py'
--- lib/lp/soyuz/model/copyarchivejob.py	2012-01-01 02:58:52 +0000
+++ lib/lp/soyuz/model/copyarchivejob.py	1970-01-01 00:00:00 +0000
@@ -1,153 +0,0 @@
-# Copyright 2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-__metaclass__ = object
-
-from zope.component import getUtility
-from zope.interface import (
-    classProvides,
-    implements,
-    )
-
-from lp.registry.interfaces.distroseries import IDistroSeriesSet
-from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.job.model.job import Job
-from lp.services.webapp.interfaces import (
-    DEFAULT_FLAVOR,
-    IStoreSelector,
-    MAIN_STORE,
-    )
-from lp.soyuz.adapters.packagelocation import PackageLocation
-from lp.soyuz.enums import ArchiveJobType
-from lp.soyuz.interfaces.archive import IArchiveSet
-from lp.soyuz.interfaces.archivejob import (
-    ICopyArchiveJob,
-    ICopyArchiveJobSource,
-    )
-from lp.soyuz.interfaces.component import IComponentSet
-from lp.soyuz.interfaces.packagecloner import IPackageCloner
-from lp.soyuz.interfaces.packageset import IPackagesetSet
-from lp.soyuz.interfaces.processor import IProcessorFamilySet
-from lp.soyuz.model.archivejob import (
-    ArchiveJob,
-    ArchiveJobDerived,
-    )
-
-
-class CopyArchiveJob(ArchiveJobDerived):
-
-    implements(ICopyArchiveJob)
-
-    class_job_type = ArchiveJobType.COPY_ARCHIVE
-    classProvides(ICopyArchiveJobSource)
-
-    @classmethod
-    def create(cls, target_archive, source_archive,
-               source_series, source_pocket, target_series, target_pocket,
-               target_component=None, proc_families=None, packagesets=None,
-               merge=False):
-        """See `ICopyArchiveJobSource`."""
-        store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
-        job_for_archive = store.find(
-            ArchiveJob,
-            ArchiveJob.archive == target_archive,
-            ArchiveJob.job_type == cls.class_job_type,
-            ArchiveJob.job == Job.id,
-            Job.id.is_in(Job.ready_jobs)
-            ).any()
-
-        if job_for_archive is not None:
-            raise ValueError(
-                "CopyArchiveJob already in progress for %s" % target_archive)
-        else:
-            if proc_families is None:
-                proc_families = []
-            if len(proc_families) > 0 and merge:
-                raise ValueError("Can't specify the architectures for merge.")
-            proc_family_names = [p.name for p in proc_families]
-            if packagesets is None:
-                packagesets = []
-            packageset_names = [p.name for p in packagesets]
-            target_component_id = None
-            if target_component is not None:
-                target_component_id = target_component.id
-            metadata = {
-                'source_archive_id': source_archive.id,
-                'source_distroseries_id': source_series.id,
-                'source_pocket_value': source_pocket.value,
-                'target_distroseries_id': target_series.id,
-                'target_pocket_value': target_pocket.value,
-                'target_component_id': target_component_id,
-                'proc_family_names': proc_family_names,
-                'packageset_names': packageset_names,
-                'merge': merge,
-            }
-            return super(CopyArchiveJob, cls).create(target_archive, metadata)
-
-    def getOopsVars(self):
-        """See `ArchiveJobDerived`."""
-        vars = ArchiveJobDerived.getOopsVars(self)
-        vars.extend([
-            ('source_archive_id', self.metadata['source_archive_id']),
-            ('source_distroseries_id',
-                self.metadata['source_distroseries_id']),
-            ('target_distroseries_id',
-                self.metadata['target_distroseries_id']),
-            ('source_pocket_value', self.metadata['source_pocket_value']),
-            ('target_pocket_value', self.metadata['target_pocket_value']),
-            ('target_component_id', self.metadata['target_component_id']),
-            ('merge', self.metadata['merge']),
-            ])
-        return vars
-
-    def getSourceLocation(self):
-        """Get the PackageLocation for the source."""
-        # TODO: handle things going bye-bye before we get here.
-        source_archive_id = self.metadata['source_archive_id']
-        source_archive = getUtility(IArchiveSet).get(source_archive_id)
-        source_distroseries_id = self.metadata['source_distroseries_id']
-        source_distroseries = getUtility(IDistroSeriesSet).get(
-            source_distroseries_id)
-        source_distribution = source_distroseries.distribution
-        source_pocket_value = self.metadata['source_pocket_value']
-        source_pocket = PackagePublishingPocket.items[source_pocket_value]
-        packageset_names = self.metadata['packageset_names']
-        packagesets = [getUtility(IPackagesetSet).getByName(name)
-                        for name in packageset_names]
-        source_location = PackageLocation(
-            source_archive, source_distribution, source_distroseries,
-            source_pocket, packagesets=packagesets)
-        return source_location
-
-    def getTargetLocation(self):
-        """Get the PackageLocation for the target."""
-        # TODO: handle things going bye-bye before we get here.
-        target_distroseries_id = self.metadata['target_distroseries_id']
-        target_distroseries = getUtility(IDistroSeriesSet).get(
-            target_distroseries_id)
-        target_distribution = target_distroseries.distribution
-        target_pocket_value = self.metadata['target_pocket_value']
-        target_pocket = PackagePublishingPocket.items[target_pocket_value]
-        target_location = PackageLocation(
-            self.archive, target_distribution, target_distroseries,
-            target_pocket)
-        target_component_id = self.metadata['target_component_id']
-        if target_component_id is not None:
-            target_location.component = getUtility(IComponentSet).get(
-                target_component_id)
-        return target_location
-
-    def run(self):
-        """See `IRunnableJob`."""
-        source_location = self.getSourceLocation()
-        target_location = self.getTargetLocation()
-        proc_family_names = self.metadata['proc_family_names']
-        proc_family_set = getUtility(IProcessorFamilySet)
-        proc_families = [proc_family_set.getByName(p)
-                         for p in proc_family_names]
-        package_cloner = getUtility(IPackageCloner)
-        if self.metadata['merge']:
-            package_cloner.mergeCopy(source_location, target_location)
-        else:
-            package_cloner.clonePackages(
-                source_location, target_location, proc_families=proc_families)

=== removed file 'lib/lp/soyuz/tests/test_archivejob.py'
--- lib/lp/soyuz/tests/test_archivejob.py	2012-01-01 02:58:52 +0000
+++ lib/lp/soyuz/tests/test_archivejob.py	1970-01-01 00:00:00 +0000
@@ -1,47 +0,0 @@
-# Copyright 2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-from lp.soyuz.enums import ArchiveJobType
-from lp.soyuz.model.archivejob import (
-    ArchiveJob,
-    ArchiveJobDerived,
-    )
-from lp.testing import TestCaseWithFactory
-from lp.testing.layers import DatabaseFunctionalLayer
-
-
-class ArchiveJobTestCase(TestCaseWithFactory):
-    """Test case for basic ArchiveJob gubbins."""
-
-    layer = DatabaseFunctionalLayer
-
-    def test_instantiate(self):
-        # ArchiveJob.__init__() instantiates a ArchiveJob instance.
-        archive = self.factory.makeArchive()
-
-        metadata = ('some', 'arbitrary', 'metadata')
-        archive_job = ArchiveJob(
-            archive, ArchiveJobType.COPY_ARCHIVE, metadata)
-
-        self.assertEqual(archive, archive_job.archive)
-        self.assertEqual(ArchiveJobType.COPY_ARCHIVE, archive_job.job_type)
-
-        # When we actually access the ArchiveJob's metadata it gets
-        # deserialized from JSON, so the representation returned by
-        # archive_job.metadata will be different from what we originally
-        # passed in.
-        metadata_expected = [u'some', u'arbitrary', u'metadata']
-        self.assertEqual(metadata_expected, archive_job.metadata)
-
-
-class ArchiveJobDerivedTestCase(TestCaseWithFactory):
-    """Test case for the ArchiveJobDerived class."""
-
-    layer = DatabaseFunctionalLayer
-
-    def test_create_explodes(self):
-        # ArchiveJobDerived.create() will blow up because it needs to be
-        # subclassed to work properly.
-        archive = self.factory.makeArchive()
-        self.assertRaises(
-            AttributeError, ArchiveJobDerived.create, archive)

=== removed file 'lib/lp/soyuz/tests/test_copyarchivejob.py'
--- lib/lp/soyuz/tests/test_copyarchivejob.py	2012-01-01 02:58:52 +0000
+++ lib/lp/soyuz/tests/test_copyarchivejob.py	1970-01-01 00:00:00 +0000
@@ -1,393 +0,0 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-__metaclass__ = type
-
-from zope.component import getUtility
-from zope.security.proxy import removeSecurityProxy
-
-from lp.buildmaster.enums import BuildStatus
-from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.soyuz.adapters.packagelocation import PackageLocation
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackagePublishingStatus,
-    )
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
-from lp.soyuz.model.copyarchivejob import CopyArchiveJob
-from lp.soyuz.model.processor import ProcessorFamilySet
-from lp.testing import (
-    celebrity_logged_in,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import DatabaseFunctionalLayer
-
-
-class CopyArchiveJobTests(TestCaseWithFactory):
-    """Tests for CopyArchiveJob."""
-
-    layer = DatabaseFunctionalLayer
-
-    def test_getOopsVars(self):
-        archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        target_distroseries = self.factory.makeDistroSeries()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        target_component = self.factory.makeComponent()
-        job = CopyArchiveJob.create(
-            archive, args['source_archive'], args['distroseries'],
-            source_pocket, target_distroseries, target_pocket,
-            target_component=target_component)
-        vars = job.getOopsVars()
-        self.assertIn(('archive_id', archive.id), vars)
-        self.assertIn(('archive_job_id', job.context.id), vars)
-        self.assertIn(('archive_job_type', job.context.job_type.title), vars)
-        self.assertIn(('source_archive_id', args['source_archive'].id), vars)
-        self.assertIn(
-            ('source_distroseries_id', args['distroseries'].id), vars)
-        self.assertIn(
-            ('target_distroseries_id', target_distroseries.id), vars)
-        self.assertIn(('source_pocket_value', source_pocket.value), vars)
-        self.assertIn(('target_pocket_value', target_pocket.value), vars)
-        self.assertIn(
-            ('target_component_id', target_component.id), vars)
-        self.assertIn(('merge', False), vars)
-
-    def makeDummyArgs(self):
-        args = {}
-        distro = self.factory.makeDistribution()
-        args['distroseries'] = self.factory.makeDistroSeries(
-            distribution=distro)
-        args['pocket'] = self.factory.getAnyPocket()
-        args['source_archive'] = self.factory.makeArchive(
-            distribution=distro)
-        return args
-
-    def test_error_if_already_exists(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'])
-        self.assertEqual(1, self._getJobCount())
-        args = self.makeDummyArgs()
-        self.assertRaises(
-            ValueError, CopyArchiveJob.create, target_archive,
-            args['source_archive'], args['distroseries'], args['pocket'],
-            args['distroseries'], args['pocket'])
-
-    def test_create_sets_source_archive_id(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_archive = self.factory.makeArchive()
-        job = CopyArchiveJob.create(
-            target_archive, source_archive, args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'])
-        self.assertEqual(
-            source_archive.id, job.metadata['source_archive_id'])
-
-    def test_create_sets_source_series_id(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_distroseries = self.factory.makeDistroSeries()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], source_distroseries,
-            args['pocket'], args['distroseries'], args['pocket'])
-        self.assertEqual(
-            source_distroseries.id, job.metadata['source_distroseries_id'])
-
-    def test_create_sets_source_pocket_value(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            source_pocket, args['distroseries'], target_pocket)
-        self.assertEqual(
-            source_pocket.value, job.metadata['source_pocket_value'])
-
-    def test_create_sets_target_pocket_value(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            source_pocket, args['distroseries'], target_pocket)
-        self.assertEqual(
-            target_pocket.value, job.metadata['target_pocket_value'])
-
-    def test_create_sets_target_distroseries_id(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        target_distroseries = self.factory.makeDistroSeries()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], target_distroseries, args['pocket'])
-        self.assertEqual(
-            target_distroseries.id, job.metadata['target_distroseries_id'])
-
-    def test_create_sets_target_component_id(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        target_component = self.factory.makeComponent()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'],
-            target_component=target_component)
-        self.assertEqual(
-            target_component.id, job.metadata['target_component_id'])
-
-    def test_create_sets_target_component_id_to_None_if_unspecified(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'])
-        self.assertEqual(None, job.metadata['target_component_id'])
-
-    def test_create_sets_proc_family_ids(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        family1 = self.factory.makeProcessorFamily(name="armel")
-        family2 = self.factory.makeProcessorFamily(name="ia64")
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'],
-            proc_families=[family1, family2])
-        self.assertEqual(
-            [f.name for f in [family1, family2]],
-            job.metadata['proc_family_names'])
-
-    def test_error_on_merge_with_proc_families(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        family1 = self.factory.makeProcessorFamily(name="armel")
-        family2 = self.factory.makeProcessorFamily(name="ia64")
-        self.assertRaises(
-            ValueError, CopyArchiveJob.create, target_archive,
-            args['source_archive'], args['distroseries'], args['pocket'],
-            args['distroseries'], args['pocket'],
-            proc_families=[family1, family2], merge=True)
-
-    def test_create_sets_source_package_set_ids(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        packagesets = [
-            self.factory.makePackageset(),
-            self.factory.makePackageset(),
-        ]
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'],
-            packagesets=packagesets)
-        self.assertEqual(
-            [p.name for p in packagesets], job.metadata['packageset_names'])
-
-    def test_create_sets_merge_False_by_default(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'])
-        self.assertEqual(False, job.metadata['merge'])
-
-    def test_create_sets_merge_True_on_request(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            args['pocket'], args['distroseries'], args['pocket'], merge=True)
-        self.assertEqual(True, job.metadata['merge'])
-
-    def test_get_source_location(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_distroseries = self.factory.makeDistroSeries()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], source_distroseries,
-            source_pocket, args['distroseries'], target_pocket)
-        location = job.getSourceLocation()
-        expected_location = PackageLocation(
-            args['source_archive'], source_distroseries.distribution,
-            source_distroseries, source_pocket)
-        self.assertEqual(expected_location, location)
-
-    def test_get_source_location_with_packagesets(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        source_distroseries = self.factory.makeDistroSeries()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        packagesets = [
-            self.factory.makePackageset(),
-            self.factory.makePackageset(),
-        ]
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], source_distroseries,
-            source_pocket, args['distroseries'], target_pocket,
-            packagesets=packagesets)
-        location = job.getSourceLocation()
-        expected_location = PackageLocation(
-            args['source_archive'], source_distroseries.distribution,
-            source_distroseries, source_pocket, packagesets=packagesets)
-        self.assertEqual(expected_location, location)
-
-    def test_get_target_location(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        target_distroseries = self.factory.makeDistroSeries()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            source_pocket, target_distroseries, target_pocket)
-        location = job.getTargetLocation()
-        expected_location = PackageLocation(
-            target_archive, target_distroseries.distribution,
-            target_distroseries, target_pocket)
-        self.assertEqual(expected_location, location)
-
-    def test_get_target_location_with_component(self):
-        target_archive = self.factory.makeArchive()
-        args = self.makeDummyArgs()
-        target_distroseries = self.factory.makeDistroSeries()
-        source_pocket = PackagePublishingPocket.RELEASE
-        target_pocket = PackagePublishingPocket.BACKPORTS
-        target_component = self.factory.makeComponent()
-        job = CopyArchiveJob.create(
-            target_archive, args['source_archive'], args['distroseries'],
-            source_pocket, target_distroseries, target_pocket,
-            target_component=target_component)
-        location = job.getTargetLocation()
-        expected_location = PackageLocation(
-            target_archive, target_distroseries.distribution,
-            target_distroseries, target_pocket)
-        expected_location.component = target_component
-        self.assertEqual(expected_location, location)
-
-    def _getJobs(self):
-        """Return the pending CopyArchiveJobs as a list."""
-        return list(CopyArchiveJob.iterReady())
-
-    def _getJobCount(self):
-        """Return the number of CopyArchiveJobs in the queue."""
-        return len(self._getJobs())
-
-    def makeSourceAndTarget(self):
-        distribution = self.factory.makeDistribution(name="foobuntu")
-        distroseries = self.factory.makeDistroSeries(
-            distribution=distribution, name="maudlin")
-        source_archive_owner = self.factory.makePerson(name="source-owner")
-        source_archive = self.factory.makeArchive(
-            name="source", owner=source_archive_owner,
-            purpose=ArchivePurpose.PPA, distribution=distribution)
-        self.factory.makeSourcePackagePublishingHistory(
-            sourcepackagename=self.factory.getOrMakeSourcePackageName(
-                name='bzr'),
-            distroseries=distroseries, component=self.factory.makeComponent(),
-            version="2.1", architecturehintlist='any',
-            archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
-            pocket=PackagePublishingPocket.RELEASE)
-        das = self.factory.makeDistroArchSeries(
-            distroseries=distroseries, architecturetag="i386",
-            processorfamily=ProcessorFamilySet().getByName("x86"),
-            supports_virtualized=True)
-        with celebrity_logged_in('admin'):
-            distroseries.nominatedarchindep = das
-        target_archive_owner = self.factory.makePerson()
-        target_archive = self.factory.makeArchive(
-            purpose=ArchivePurpose.COPY, owner=target_archive_owner,
-            name="test-copy-archive", distribution=distribution,
-            description="Test copy archive", enabled=False)
-        return source_archive, target_archive, distroseries
-
-    def checkPublishedSources(self, expected, archive, series):
-        # We need to be admin as the archive is disabled at this point.
-        with celebrity_logged_in('admin'):
-            sources = archive.getPublishedSources(
-                distroseries=series,
-                status=(
-                    PackagePublishingStatus.PENDING,
-                    PackagePublishingStatus.PUBLISHED))
-            actual = []
-            for source in sources:
-                actual.append(
-                    (source.source_package_name,
-                     source.source_package_version))
-            self.assertEqual(sorted(expected), sorted(actual))
-
-    def test_run(self):
-        """Test that CopyArchiveJob.run() actually copies the archive.
-
-        We just make a simple test here, and rely on PackageCloner tests
-        to cover the functionality.
-        """
-        source_archive, target_archive, series = self.makeSourceAndTarget()
-        job = CopyArchiveJob.create(
-            target_archive, source_archive, series,
-            PackagePublishingPocket.RELEASE, series,
-            PackagePublishingPocket.RELEASE)
-        job.run()
-        self.checkPublishedSources([("bzr", "2.1")], target_archive, series)
-
-    def test_run_mergeCopy(self):
-        """Test that CopyArchiveJob.run() when merge=True does a mergeCopy."""
-        source_archive, target_archive, series = self.makeSourceAndTarget()
-        # Create the copy archive
-        job = CopyArchiveJob.create(
-            target_archive, source_archive, series,
-            PackagePublishingPocket.RELEASE, series,
-            PackagePublishingPocket.RELEASE)
-        job.start()
-        job.run()
-        job.complete()
-        # Now the two archives are in the same state, so we change the
-        # source archive and request a merge to check that it works.
-        # Create a new version of the apt package in the source
-        self.factory.makeSourcePackagePublishingHistory(
-            sourcepackagename=self.factory.getOrMakeSourcePackageName(
-                name='apt'),
-            distroseries=series, component=self.factory.makeComponent(),
-            version="1.2", architecturehintlist='any',
-            archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
-            pocket=PackagePublishingPocket.RELEASE)
-        # Create a job to merge
-        job = CopyArchiveJob.create(
-            target_archive, source_archive, series,
-            PackagePublishingPocket.RELEASE, series,
-            PackagePublishingPocket.RELEASE, merge=True)
-        job.run()
-        # Check that the new apt package is in the target
-        self.checkPublishedSources(
-            [("bzr", "2.1"), ("apt", "1.2")], target_archive, series)
-
-    def test_run_with_proc_families(self):
-        """Test that a CopyArchiveJob job with proc_families uses them.
-
-        If we create a CopyArchiveJob with proc_families != None then
-        they should be used when cloning packages.
-        """
-        source_archive, target_archive, series = self.makeSourceAndTarget()
-        proc_families = [ProcessorFamilySet().getByName("x86")]
-        job = CopyArchiveJob.create(
-            target_archive, source_archive, series,
-            PackagePublishingPocket.RELEASE, series,
-            PackagePublishingPocket.RELEASE, proc_families=proc_families)
-        job.run()
-        builds = list(
-            getUtility(IBinaryPackageBuildSet).getBuildsForArchive(
-            target_archive, status=BuildStatus.NEEDSBUILD))
-        actual_builds = list()
-        for build in builds:
-            naked_build = removeSecurityProxy(build)
-            spr = naked_build.source_package_release
-            actual_builds.append(
-                (spr.name, spr.version, naked_build.processor.family.name))
-        # One build for the one package, as we specified one processor
-        # family.
-        self.assertEqual([("bzr", "2.1", "x86")], actual_builds)


Follow ups