launchpad-reviewers team mailing list archive
Message #03893
[Merge] lp:~rvb/launchpad/init-series-bug-789091-devel into lp:launchpad
Raphaël Victor Badin has proposed merging lp:~rvb/launchpad/init-series-bug-789091-devel into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #789091 in Launchpad itself: "initialise_distroseries doesn't work with multiple parents"
https://bugs.launchpad.net/launchpad/+bug/789091
For more details, see:
https://code.launchpad.net/~rvb/launchpad/init-series-bug-789091-devel/+merge/63676
This branch fixes InitialiseDistroSeries so that it actually initialises a derived series from multiple parents.
= Details =
*This branch should probably be reviewed by a Soyuz expert.*
Previously, only the first parent given to InitialiseDistroSeries was taken into account when initialising the series. This branch fixes that and copies the information from all the parents into the derived series: the packagesets, components and sections are merged, the source format selections are unioned, and all the package publication information is copied. Since only one published version of each package should exist in the derived series, we copy from each parent in order, excluding duplicates. As a result, when the same package exists in multiple parents, the version from the first parent that publishes it is the one that ends up in the derived series.
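To illustrate the precedence rule, here is a minimal sketch with plain dicts standing in for the real publication records (the helper below is illustrative only, not the Soyuz API):

    # First-parent-wins merge, assuming each parent is a dict mapping
    # source package name -> version.
    def merge_parent_publications(parents):
        child = {}
        for parent in parents:  # parents in decreasing order of preference
            for name, version in parent.items():
                if name not in child:  # skip names already copied
                    child[name] = version
        return child

    # 'pkg' is published in both parents; the first parent's version wins.
    parent1 = {'pkg': '0.3-1', 'udev': '0.1-1'}
    parent2 = {'pkg': '0.1-1', 'libc6': '2.8-1'}
    assert merge_parent_publications([parent1, parent2])['pkg'] == '0.3-1'

In the branch itself this is what the new no_duplicates flag on IPackageCloner.clonePackages achieves, via the extra NOT IN subquery on sourcepackagename in the diff below.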
= Tests =
./bin/test -cvv test_initialise_distroseries test_multiple_parents_initialize
./bin/test -cvv test_initialise_distroseries test_multiple_parents_ordering
./bin/test -cvv test_initialise_distroseries test_multiple_parent_packagesets_merge
./bin/test -cvv test_initialise_distroseries test_multiple_parents_format_selection_union
./bin/test -cvv test_initialise_distroseries test_multiple_parents_component_merge
./bin/test -cvv test_initialise_distroseries test_multiple_parents_section_merge
./bin/test -cvv test_initialise_distroseries test_multiple_parents_same_package
./bin/test -cvv test_packagecloner testCopyNoDuplicates
= QA =
On DF:
- create a new series and initialise it from multiple parents.
- check that:
- component selections, section selections and packagesets are merged into the derived series;
- the format selection in the child is the union of the parents' format selections;
- for identical packages (same source package name), the version published in the child is the one from the first parent that publishes the package (see the sketch below).
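A quick way to verify the last point, mirroring test_multiple_parents_same_package in the diff below (child stands for the freshly initialised series; the package name and version come from the test data, not from DF):

    # One publication per source package name, carrying the first
    # parent's version.
    published = child.main_archive.getPublishedSources(name='package')
    assert published.count() == 1
    assert published[0].sourcepackagerelease.version == u'0.3-1'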
--
https://code.launchpad.net/~rvb/launchpad/init-series-bug-789091-devel/+merge/63676
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~rvb/launchpad/init-series-bug-789091-devel into lp:launchpad.
=== modified file 'lib/lp/registry/interfaces/distroseriesparent.py'
--- lib/lp/registry/interfaces/distroseriesparent.py 2011-05-30 10:41:17 +0000
+++ lib/lp/registry/interfaces/distroseriesparent.py 2011-06-10 10:56:52 +0000
@@ -56,12 +56,19 @@
title=_("The component for this overlay"), required=False,
vocabulary='Component')
+ ordering = Int(
+ title=_("Parent build dependency ordering"), required=False,
+ default=1,
+ description=_(
+ "Parents are ordered in decreasing order of preference "
+ "starting from 1."))
+
class IDistroSeriesParentSet(Interface):
"""`DistroSeriesParentSet` interface."""
def new(derived_series, parent_series, initialized, is_overlay=False,
- pocket=None, component=None):
+ pocket=None, component=None, ordering=1):
"""Create a new `DistroSeriesParent`."""
def getByDerivedSeries(derived_series):
=== modified file 'lib/lp/registry/model/distroseries.py'
--- lib/lp/registry/model/distroseries.py 2011-06-08 15:27:40 +0000
+++ lib/lp/registry/model/distroseries.py 2011-06-10 10:56:52 +0000
@@ -1993,7 +1993,8 @@
from lp.registry.interfaces.distroseriesparent import (
IDistroSeriesParentSet,
)
- dsps = getUtility(IDistroSeriesParentSet).getByDerivedSeries(self)
+ dsp_set = getUtility(IDistroSeriesParentSet)
+ dsps = dsp_set.getByDerivedSeries(self).order_by('ordering')
return [dsp.parent_series for dsp in dsps]
def getDerivedSeries(self):
=== modified file 'lib/lp/registry/model/distroseriesparent.py'
--- lib/lp/registry/model/distroseriesparent.py 2011-05-30 10:41:17 +0000
+++ lib/lp/registry/model/distroseriesparent.py 2011-06-10 10:56:52 +0000
@@ -55,6 +55,8 @@
component_id = Int(name='component', allow_none=True)
component = Reference(component_id, 'Component.id')
+ ordering = Int(allow_none=False, default=1)
+
class DistroSeriesParentSet:
"""See `IDistroSeriesParentSet`."""
@@ -62,7 +64,7 @@
title = "Cross reference of parent and derived distroseries."
def new(self, derived_series, parent_series, initialized,
- is_overlay=False, pocket=None, component=None):
+ is_overlay=False, pocket=None, component=None, ordering=1):
"""Make and return a new `DistroSeriesParent`."""
store = IMasterStore(DistroSeriesParent)
dsp = DistroSeriesParent()
@@ -72,6 +74,7 @@
dsp.is_overlay = is_overlay
dsp.pocket = pocket
dsp.component = component
+ dsp.ordering = ordering
store.add(dsp)
return dsp
=== modified file 'lib/lp/soyuz/interfaces/packagecloner.py'
--- lib/lp/soyuz/interfaces/packagecloner.py 2010-10-07 11:50:39 +0000
+++ lib/lp/soyuz/interfaces/packagecloner.py 2011-06-10 10:56:52 +0000
@@ -20,7 +20,7 @@
def clonePackages(
origin, destination, distroarchseries_list=None,
proc_families=None, sourcepackagenames=None,
- always_create=False):
+ always_create=False, no_duplicates=False):
"""Copies the source packages from origin to destination as
well as the binary packages for the DistroArchSeries specified.
@@ -33,6 +33,8 @@
:param sourcepackagenames: the source packages which are to be
copied.
:param always_create: if builds should always be created.
+ :param no_duplicates: if we should prevent the duplication of packages
+ with identical sourcepackagename in the destination.
"""
def mergeCopy(origin, destination):
=== modified file 'lib/lp/soyuz/model/packagecloner.py'
--- lib/lp/soyuz/model/packagecloner.py 2011-04-11 05:21:38 +0000
+++ lib/lp/soyuz/model/packagecloner.py 2011-06-10 10:56:52 +0000
@@ -62,7 +62,7 @@
def clonePackages(self, origin, destination, distroarchseries_list=None,
proc_families=None, sourcepackagenames=None,
- always_create=False):
+ always_create=False, no_duplicates=False):
"""Copies packages from origin to destination package location.
Binary packages are only copied for the `DistroArchSeries` pairs
@@ -84,10 +84,13 @@
@param always_create: if we should create builds for every source
package copied, useful if no binaries are to be copied.
@type always_create: Boolean
+ @param no_duplicates: if we should prevent the duplication of packages
+ with identical sourcepackagename in the destination.
+ @type no_duplicates: Boolean
"""
# First clone the source packages.
self._clone_source_packages(
- origin, destination, sourcepackagenames)
+ origin, destination, sourcepackagenames, no_duplicates)
# Are we also supposed to clone binary packages from origin to
# destination distroarchseries pairs?
@@ -399,7 +402,7 @@
store.execute(pop_query)
def _clone_source_packages(
- self, origin, destination, sourcepackagenames):
+ self, origin, destination, sourcepackagenames, no_duplicates):
"""Copy source publishing data from origin to destination.
@type origin: PackageLocation
@@ -410,7 +413,9 @@
to be copied.
@type sourcepackagenames: Iterable
@param sourcepackagenames: List of source packages to restrict
- the copy to
+ the copy to.
+ @param no_duplicates: if we should prevent the duplication of packages
+ with identical sourcepackagename in the destination.
"""
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
query = '''
@@ -453,7 +458,28 @@
''' % sqlvalues([p.id for p in origin.packagesets])
if origin.component:
- query += "and spph.component = %s" % sqlvalues(origin.component)
+ query += "AND spph.component = %s" % sqlvalues(origin.component)
+
+ if no_duplicates:
+ query += '''AND spph.sourcepackagerelease NOT IN
+ (SELECT origin_spr.id
+ FROM SourcePackageRelease as origin_spr,
+ SourcePackageRelease as dest_spr,
+ SourcePackagePublishingHistory as
+ dest_spph
+ WHERE dest_spph.sourcepackagerelease =
+ dest_spr.id
+ AND dest_spr.sourcepackagename =
+ origin_spr.sourcepackagename
+ AND dest_spph.distroseries = %s
+ AND dest_spph.status in (%s, %s)
+ AND dest_spph.pocket = %s
+ AND dest_spph.archive = %s)
+ ''' % sqlvalues(
+ destination.distroseries,
+ PackagePublishingStatus.PENDING,
+ PackagePublishingStatus.PUBLISHED,
+ destination.pocket, destination.archive)
store.execute(query)
=== modified file 'lib/lp/soyuz/scripts/initialise_distroseries.py'
--- lib/lp/soyuz/scripts/initialise_distroseries.py 2011-06-01 12:46:52 +0000
+++ lib/lp/soyuz/scripts/initialise_distroseries.py 2011-06-10 10:56:52 +0000
@@ -17,13 +17,12 @@
from canonical.database.sqlbase import sqlvalues
from canonical.launchpad.helpers import ensure_unicode
-from canonical.launchpad.interfaces.lpstorm import (
- IMasterStore,
- IStore,
- )
+from canonical.launchpad.interfaces.lpstorm import IMasterStore
+from lp.app.errors import NotFoundError
from lp.buildmaster.enums import BuildStatus
from lp.registry.interfaces.distroseriesparent import IDistroSeriesParentSet
from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.services.database import bulk
from lp.soyuz.adapters.packagelocation import PackageLocation
from lp.soyuz.enums import (
ArchivePurpose,
@@ -32,7 +31,10 @@
from lp.soyuz.interfaces.archive import IArchiveSet
from lp.soyuz.interfaces.component import IComponentSet
from lp.soyuz.interfaces.packagecloner import IPackageCloner
-from lp.soyuz.interfaces.packageset import IPackagesetSet
+from lp.soyuz.interfaces.packageset import (
+ IPackagesetSet,
+ NoSuchPackageSet,
+ )
from lp.soyuz.model.packageset import Packageset
@@ -75,13 +77,14 @@
# Avoid circular imports
from lp.registry.model.distroseries import DistroSeries
- # XXX: rvb 2011-05-27 bug=789091: This code should be fixed to support
- # initialising from multiple parents.
- self.parent_id = parents[0]
- self.parent = IStore(
- DistroSeries).get(DistroSeries, int(self.parent_id))
-
self.distroseries = distroseries
+ self.parent_ids = [int(id) for id in parents]
+ # Load parent objects in bulk...
+ parents_bulk = bulk.load(DistroSeries, self.parent_ids)
+ # ... sort the parents to match the order in the 'parents' parameter.
+ self.parents = sorted(
+ parents_bulk,
+ key=lambda parent: self.parent_ids.index(parent.id))
self.arches = arches
self.packagesets = [
ensure_unicode(packageset) for packageset in packagesets]
@@ -97,27 +100,28 @@
("DistroSeries {child.name} has already been initialized"
".").format(
child=self.distroseries))
- if self.distroseries.distribution.id == self.parent.distribution.id:
- self._checkBuilds()
- self._checkQueue()
+ for parent in self.parents:
+ if self.distroseries.distribution.id == parent.distribution.id:
+ self._checkBuilds(parent)
+ self._checkQueue(parent)
self._checkSeries()
- def _checkBuilds(self):
- """Assert there are no pending builds for parent series.
+ def _checkBuilds(self, parent):
+ """Assert there are no pending builds for the given parent series.
Only cares about the RELEASE pocket, which is the only one inherited
via initialiseFromParent method.
"""
# only the RELEASE pocket is inherited, so we only check
# pending build records for it.
- pending_builds = self.parent.getBuildRecords(
+ pending_builds = parent.getBuildRecords(
BuildStatus.NEEDSBUILD, pocket=PackagePublishingPocket.RELEASE)
if pending_builds.any():
raise InitialisationError("Parent series has pending builds.")
- def _checkQueue(self):
- """Assert upload queue is empty on parent series.
+ def _checkQueue(self, parent):
+ """Assert upload queue is empty on the given parent series.
Only cares about the RELEASE pocket, which is the only one inherited
via initialiseFromParent method.
@@ -127,7 +131,7 @@
for queue in (
PackageUploadStatus.NEW, PackageUploadStatus.ACCEPTED,
PackageUploadStatus.UNAPPROVED):
- items = self.parent.getQueueItems(
+ items = parent.getQueueItems(
queue, pocket=PackagePublishingPocket.RELEASE)
if items:
raise InitialisationError(
@@ -148,7 +152,7 @@
raise InitialisationError(error)
def initialise(self):
- self._set_parent()
+ self._set_parents()
self._copy_configuration()
self._copy_architectures()
self._copy_packages()
@@ -156,64 +160,80 @@
self._set_initialised()
transaction.commit()
- def _set_parent(self):
- # XXX: rvb 2011-05-27 bug=789091: This code should be fixed to support
- # initialising from multiple parents.
- dsp_set = getUtility(IDistroSeriesParentSet)
- if self.overlays and self.overlays[0]:
- pocket = PackagePublishingPocket.__metaclass__.getTermByToken(
- PackagePublishingPocket, self.overlay_pockets[0]).value
- component_set = getUtility(IComponentSet)
- component = component_set[self.overlay_components[0]]
- dsp_set.new(
- self.distroseries, self.parent, initialized=False,
- is_overlay=True, pocket=pocket, component=component)
- else:
- dsp_set.new(self.distroseries, self.parent, initialized=False)
+ def _set_parents(self):
+ count = 0
+ for parent in self.parents:
+ dsp_set = getUtility(IDistroSeriesParentSet)
+ if self.overlays and self.overlays[count]:
+ pocket = PackagePublishingPocket.__metaclass__.getTermByToken(
+ PackagePublishingPocket,
+ self.overlay_pockets[count]).value
+ component_set = getUtility(IComponentSet)
+ component = component_set[self.overlay_components[count]]
+ dsp_set.new(
+ self.distroseries, parent, initialized=False,
+ is_overlay=True, pocket=pocket, component=component,
+ ordering=count + 1)
+ else:
+ dsp_set.new(
+ self.distroseries, parent, initialized=False,
+ is_overlay=False, ordering=count + 1)
+ count += 1
def _set_initialised(self):
dsp_set = getUtility(IDistroSeriesParentSet)
- distroseriesparent = dsp_set.getByDerivedAndParentSeries(
- self.distroseries, self.parent)
- distroseriesparent.initialized = True
+ distroseriesparents = dsp_set.getByDerivedSeries(
+ self.distroseries)
+ for distroseriesparent in distroseriesparents:
+ distroseriesparent.initialized = True
def _copy_configuration(self):
- self.distroseries.backports_not_automatic = \
- self.parent.backports_not_automatic
+ self.distroseries.backports_not_automatic = any(
+ [parent.backports_not_automatic for parent in self.parents])
def _copy_architectures(self):
- include = ''
+ filtering = ' AND distroseries IN %s ' % (
+ sqlvalues(self.parent_ids))
if self.arches:
- include = "AND architecturetag IN %s" % sqlvalues(self.arches)
+ filtering += ' AND architecturetag IN %s ' % (
+ sqlvalues(self.arches))
self._store.execute("""
INSERT INTO DistroArchSeries
(distroseries, processorfamily, architecturetag, owner, official)
- SELECT %s, processorfamily, architecturetag, %s, official
- FROM DistroArchSeries WHERE distroseries = %s
- AND enabled = TRUE %s
- """ % (sqlvalues(self.distroseries, self.distroseries.owner,
- self.parent) + (include,)))
+ SELECT %s, processorfamily, architecturetag, %s,
+ bool_and(official)
+ FROM DistroArchSeries WHERE enabled = TRUE %s
+ GROUP BY processorfamily, architecturetag
+ """ % (sqlvalues(self.distroseries, self.distroseries.owner)
+ + (filtering, )))
self._store.flush()
+ # Take nominatedarchindep from the first parent.
self.distroseries.nominatedarchindep = self.distroseries[
- self.parent.nominatedarchindep.architecturetag]
+ self.parents[0].nominatedarchindep.architecturetag]
def _copy_packages(self):
# Perform the copies
self._copy_component_section_and_format_selections()
- # Prepare the list of distroarchseries for which binary packages
+ # Prepare the lists of distroarchseries for which binary packages
# shall be copied.
- distroarchseries_list = []
- for arch in self.distroseries.architectures:
- if self.arches and (arch.architecturetag not in self.arches):
- continue
- parent_arch = self.parent[arch.architecturetag]
- distroarchseries_list.append((parent_arch, arch))
+ distroarchseries_lists = {}
+ for parent in self.parents:
+ distroarchseries_lists[parent] = []
+ for arch in self.distroseries.architectures:
+ if self.arches and (arch.architecturetag not in self.arches):
+ continue
+ try:
+ parent_arch = parent[arch.architecturetag]
+ except NotFoundError:
+ continue
+
+ distroarchseries_lists[parent].append((parent_arch, arch))
# Now copy source and binary packages.
- self._copy_publishing_records(distroarchseries_list)
+ self._copy_publishing_records(distroarchseries_lists)
self._copy_packaging_links()
- def _copy_publishing_records(self, distroarchseries_list):
+ def _copy_publishing_records(self, distroarchseries_lists):
"""Copy the publishing records from the parent arch series
to the given arch series in ourselves.
@@ -232,58 +252,60 @@
pkgset = self._store.get(Packageset, int(pkgsetid))
spns += list(pkgset.getSourcesIncluded())
- for archive in self.parent.distribution.all_distro_archives:
- if archive.purpose not in (
- ArchivePurpose.PRIMARY, ArchivePurpose.DEBUG):
- continue
+ for parent in self.parents:
+ distroarchseries_list = distroarchseries_lists[parent]
+ for archive in parent.distribution.all_distro_archives:
+ if archive.purpose not in (
+ ArchivePurpose.PRIMARY, ArchivePurpose.DEBUG):
+ continue
- target_archive = archive_set.getByDistroPurpose(
- self.distroseries.distribution, archive.purpose)
- if archive.purpose is ArchivePurpose.PRIMARY:
- assert target_archive is not None, (
- "Target archive doesn't exist?")
- origin = PackageLocation(
- archive, self.parent.distribution, self.parent,
- PackagePublishingPocket.RELEASE)
- destination = PackageLocation(
- target_archive, self.distroseries.distribution,
- self.distroseries, PackagePublishingPocket.RELEASE)
- proc_families = None
- if self.rebuild:
- proc_families = [
- das[1].processorfamily
- for das in distroarchseries_list]
- distroarchseries_list = ()
- getUtility(IPackageCloner).clonePackages(
- origin, destination, distroarchseries_list,
- proc_families, spns, self.rebuild)
+ target_archive = archive_set.getByDistroPurpose(
+ self.distroseries.distribution, archive.purpose)
+ if archive.purpose is ArchivePurpose.PRIMARY:
+ assert target_archive is not None, (
+ "Target archive doesn't exist?")
+ origin = PackageLocation(
+ archive, parent.distribution, parent,
+ PackagePublishingPocket.RELEASE)
+ destination = PackageLocation(
+ target_archive, self.distroseries.distribution,
+ self.distroseries, PackagePublishingPocket.RELEASE)
+ proc_families = None
+ if self.rebuild:
+ proc_families = [
+ das[1].processorfamily
+ for das in distroarchseries_list]
+ distroarchseries_list = ()
+ getUtility(IPackageCloner).clonePackages(
+ origin, destination, distroarchseries_list,
+ proc_families, spns, self.rebuild, no_duplicates=True)
def _copy_component_section_and_format_selections(self):
- """Copy the section, component and format selections from the parent
+ """Copy the section, component and format selections from the parents
distro series into this one.
"""
# Copy the component selections
self._store.execute('''
INSERT INTO ComponentSelection (distroseries, component)
- SELECT %s AS distroseries, cs.component AS component
- FROM ComponentSelection AS cs WHERE cs.distroseries = %s
+ SELECT DISTINCT %s AS distroseries, cs.component AS component
+ FROM ComponentSelection AS cs WHERE cs.distroseries IN %s
''' % sqlvalues(self.distroseries.id,
- self.parent.id))
+ self.parent_ids))
# Copy the section selections
self._store.execute('''
INSERT INTO SectionSelection (distroseries, section)
- SELECT %s as distroseries, ss.section AS section
- FROM SectionSelection AS ss WHERE ss.distroseries = %s
+ SELECT DISTINCT %s as distroseries, ss.section AS section
+ FROM SectionSelection AS ss WHERE ss.distroseries IN %s
''' % sqlvalues(self.distroseries.id,
- self.parent.id))
+ self.parent_ids))
# Copy the source format selections
self._store.execute('''
INSERT INTO SourcePackageFormatSelection (distroseries, format)
- SELECT %s as distroseries, spfs.format AS format
+ SELECT DISTINCT %s as distroseries, spfs.format AS format
FROM SourcePackageFormatSelection AS spfs
- WHERE spfs.distroseries = %s
+ WHERE spfs.distroseries IN %s
''' % sqlvalues(self.distroseries.id,
- self.parent.id))
+ self.parent_ids))
def _copy_packaging_links(self):
"""Copy the packaging links from the parent series to this one."""
@@ -304,7 +326,7 @@
-- the data set for the series being updated, yet results are
-- in fact the data from the original series.
JOIN Distroseries ChildSeries
- ON Packaging.distroseries = %s
+ ON Packaging.distroseries IN %s
WHERE
-- Select only the packaging links that are in the parent
-- that are not in the child.
@@ -315,7 +337,7 @@
WHERE distroseries in (
SELECT id
FROM Distroseries
- WHERE id = %s
+ WHERE id IN %s
)
EXCEPT
SELECT sourcepackagename
@@ -326,26 +348,39 @@
WHERE id = ChildSeries.id
)
)
- """ % (self.parent.id, self.distroseries.id, self.parent.id))
+ """ % sqlvalues(
+ self.parent_ids, self.distroseries.id, self.parent_ids))
def _copy_packagesets(self):
"""Copy packagesets from the parent distroseries."""
- packagesets = self._store.find(Packageset, distroseries=self.parent)
+ # Avoid circular imports.
+ from lp.registry.model.distroseries import DistroSeries
+
+ packagesets = self._store.find(
+ Packageset, DistroSeries.id.is_in(self.parent_ids))
parent_to_child = {}
# Create the packagesets, and any archivepermissions
+ parent_distro_ids = [
+ parent.distribution.id for parent in self.parents]
for parent_ps in packagesets:
# Cross-distro initialisations get packagesets owned by the
# distro owner, otherwise the old owner is preserved.
if self.packagesets and str(parent_ps.id) not in self.packagesets:
continue
- if self.distroseries.distribution == self.parent.distribution:
- new_owner = parent_ps.owner
- else:
- new_owner = self.distroseries.owner
- child_ps = getUtility(IPackagesetSet).new(
- parent_ps.name, parent_ps.description,
- new_owner, distroseries=self.distroseries,
- related_set=parent_ps)
+ packageset_set = getUtility(IPackagesetSet)
+ # First, try to fetch an existing packageset with this name.
+ try:
+ child_ps = packageset_set.getByName(
+ parent_ps.name, self.distroseries)
+ except NoSuchPackageSet:
+ if self.distroseries.distribution.id in parent_distro_ids:
+ new_owner = parent_ps.owner
+ else:
+ new_owner = self.distroseries.owner
+ child_ps = getUtility(IPackagesetSet).new(
+ parent_ps.name, parent_ps.description,
+ new_owner, distroseries=self.distroseries,
+ related_set=parent_ps)
self._store.execute("""
INSERT INTO Archivepermission
(person, permission, archive, packageset, explicit)
@@ -355,7 +390,7 @@
self.distroseries.main_archive, child_ps.id,
parent_ps.id))
parent_to_child[parent_ps] = child_ps
- # Copy the relations between sets, and the contents
+ # Copy the relations between sets, and the contents.
for old_series_ps, new_series_ps in parent_to_child.items():
old_series_sets = old_series_ps.setsIncluded(
direct_inclusion=True)
=== modified file 'lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py'
--- lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py 2011-06-08 11:06:04 +0000
+++ lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py 2011-06-10 10:56:52 +0000
@@ -31,7 +31,9 @@
from lp.soyuz.interfaces.sourcepackageformat import (
ISourcePackageFormatSelectionSet,
)
+from lp.soyuz.model.component import ComponentSelection
from lp.soyuz.model.distroarchseries import DistroArchSeries
+from lp.soyuz.model.section import SectionSelection
from lp.soyuz.scripts.initialise_distroseries import (
InitialisationError,
InitialiseDistroSeries,
@@ -43,40 +45,40 @@
layer = LaunchpadZopelessLayer
- def setUp(self):
- super(TestInitialiseDistroSeries, self).setUp()
- self.parent = self.factory.makeDistroSeries()
- pf = self.factory.makeProcessorFamily()
- pf.addProcessor('x86', '', '')
- self.parent_das = self.factory.makeDistroArchSeries(
- distroseries=self.parent, processorfamily=pf)
+ def setupParent(self, packages=None, format_selection=None):
+ parent = self.factory.makeDistroSeries()
+ parent_das = self.factory.makeDistroArchSeries(distroseries=parent)
lf = self.factory.makeLibraryFileAlias()
transaction.commit()
- self.parent_das.addOrUpdateChroot(lf)
- self.parent_das.supports_virtualized = True
- self.parent.nominatedarchindep = self.parent_das
+ parent_das.addOrUpdateChroot(lf)
+ parent_das.supports_virtualized = True
+ parent.nominatedarchindep = parent_das
+ if format_selection is None:
+ format_selection = SourcePackageFormat.FORMAT_1_0
getUtility(ISourcePackageFormatSelectionSet).add(
- self.parent, SourcePackageFormat.FORMAT_1_0)
- self.parent.backports_not_automatic = True
- self._populate_parent()
+ parent, format_selection)
+ parent.backports_not_automatic = True
+ self._populate_parent(parent, parent_das, packages)
+ return parent, parent_das
- def _populate_parent(self):
- packages = {'udev': '0.1-1', 'libc6': '2.8-1',
- 'postgresql': '9.0-1', 'chromium': '3.6'}
+ def _populate_parent(self, parent, parent_das, packages=None):
+ if packages is None:
+ packages = {'udev': '0.1-1', 'libc6': '2.8-1',
+ 'postgresql': '9.0-1', 'chromium': '3.6'}
for package in packages.keys():
- spn = self.factory.makeSourcePackageName(package)
+ spn = self.factory.getOrMakeSourcePackageName(package)
spph = self.factory.makeSourcePackagePublishingHistory(
sourcepackagename=spn, version=packages[package],
- distroseries=self.parent,
+ distroseries=parent,
pocket=PackagePublishingPocket.RELEASE,
status=PackagePublishingStatus.PUBLISHED)
status = BuildStatus.FULLYBUILT
if package is 'chromium':
status = BuildStatus.FAILEDTOBUILD
- bpn = self.factory.makeBinaryPackageName(package)
+ bpn = self.factory.getOrMakeBinaryPackageName(package)
build = self.factory.makeBinaryPackageBuild(
source_package_release=spph.sourcepackagerelease,
- distroarchseries=self.parent_das,
+ distroarchseries=parent_das,
status=status)
bpr = self.factory.makeBinaryPackageRelease(
binarypackagename=bpn, build=build,
@@ -84,13 +86,14 @@
if package is not 'chromium':
self.factory.makeBinaryPackagePublishingHistory(
binarypackagerelease=bpr,
- distroarchseries=self.parent_das,
+ distroarchseries=parent_das,
pocket=PackagePublishingPocket.RELEASE,
status=PackagePublishingStatus.PUBLISHED)
def test_failure_for_already_released_distroseries(self):
# Initialising a distro series that has already been used will
# error.
+ self.parent, self.parent_das = self.setupParent()
child = self.factory.makeDistroSeries()
self.factory.makeDistroArchSeries(distroseries=child)
ids = InitialiseDistroSeries(child, [self.parent.id])
@@ -103,6 +106,7 @@
# If the parent series has pending builds, and the child is a series
# of the same distribution (which means they share an archive), we
# can't initialise.
+ self.parent, self.parent_das = self.setupParent()
source = self.factory.makeSourcePackagePublishingHistory(
distroseries=self.parent,
pocket=PackagePublishingPocket.RELEASE)
@@ -117,16 +121,19 @@
def test_success_with_pending_builds(self):
# If the parent series has pending builds, and the child's
# distribution is different, we can initialise.
+ self.parent, self.parent_das = self.setupParent()
source = self.factory.makeSourcePackagePublishingHistory(
distroseries=self.parent,
pocket=PackagePublishingPocket.RELEASE)
source.createMissingBuilds()
- child = self._full_initialise()
- self.assertDistroSeriesInitialisedCorrectly(child)
+ child = self._fullInitialise([self.parent])
+ self.assertDistroSeriesInitialisedCorrectly(
+ child, self.parent, self.parent_das)
def test_failure_with_queue_items(self):
# If the parent series has items in its queues, such as NEW and
# UNAPPROVED, we can't initialise.
+ self.parent, self.parent_das = self.setupParent()
self.parent.createQueueEntry(
PackagePublishingPocket.RELEASE, self.parent.main_archive,
'foo.changes', 'bar')
@@ -136,16 +143,17 @@
InitialisationError, "Parent series queues are not empty.",
ids.check)
- def assertDistroSeriesInitialisedCorrectly(self, child):
+ def assertDistroSeriesInitialisedCorrectly(self, child, parent,
+ parent_das):
# Check that 'udev' has been copied correctly.
- parent_udev_pubs = self.parent.getPublishedSources('udev')
+ parent_udev_pubs = parent.getPublishedSources('udev')
child_udev_pubs = child.getPublishedSources('udev')
self.assertEqual(
parent_udev_pubs.count(), child_udev_pubs.count())
- parent_arch_udev_pubs = self.parent[
- self.parent_das.architecturetag].getReleasedPackages('udev')
+ parent_arch_udev_pubs = parent[
+ parent_das.architecturetag].getReleasedPackages('udev')
child_arch_udev_pubs = child[
- self.parent_das.architecturetag].getReleasedPackages('udev')
+ parent_das.architecturetag].getReleasedPackages('udev')
self.assertEqual(
len(parent_arch_udev_pubs), len(child_arch_udev_pubs))
# And the binary package, and linked source package look fine too.
@@ -154,16 +162,16 @@
self.assertEqual(
udev_bin.build.title,
u'%s build of udev 0.1-1 in %s %s RELEASE' % (
- self.parent_das.architecturetag, self.parent.parent.name,
- self.parent.name))
+ parent_das.architecturetag, parent.parent.name,
+ parent.name))
udev_src = udev_bin.build.source_package_release
self.assertEqual(udev_src.title, u'udev - 0.1-1')
# The build of udev 0.1-1 has been copied across.
child_udev = udev_src.getBuildByArch(
- child[self.parent_das.architecturetag], child.main_archive)
+ child[parent_das.architecturetag], child.main_archive)
parent_udev = udev_src.getBuildByArch(
- self.parent[self.parent_das.architecturetag],
- self.parent.main_archive)
+ parent[parent_das.architecturetag],
+ parent.main_archive)
self.assertEqual(parent_udev.id, child_udev.id)
# We also inherit the permitted source formats from our parent.
self.assertTrue(
@@ -172,30 +180,35 @@
# Other configuration bits are copied too.
self.assertTrue(child.backports_not_automatic)
- def _full_initialise(self, child=None, arches=(), packagesets=(),
- rebuild=False, distribution=None, overlays=(),
- overlay_pockets=(), overlay_components=()):
+ def _fullInitialise(self, parents, child=None, arches=(), packagesets=(),
+ rebuild=False, distribution=None, overlays=(),
+ overlay_pockets=(), overlay_components=()):
if child is None:
child = self.factory.makeDistroSeries(distribution=distribution)
ids = InitialiseDistroSeries(
- child, [self.parent.id], arches, packagesets, rebuild, overlays,
- overlay_pockets, overlay_components)
+ child, [parent.id for parent in parents], arches, packagesets,
+ rebuild, overlays, overlay_pockets, overlay_components)
ids.check()
ids.initialise()
return child
def test_initialise(self):
# Test a full initialise with no errors.
- child = self._full_initialise()
- self.assertDistroSeriesInitialisedCorrectly(child)
+ self.parent, self.parent_das = self.setupParent()
+ child = self._fullInitialise([self.parent])
+ self.assertDistroSeriesInitialisedCorrectly(
+ child, self.parent, self.parent_das)
def test_initialise_only_one_das(self):
# Test a full initialise with no errors, but only copy i386 to
# the child.
+ self.parent, self.parent_das = self.setupParent()
self.factory.makeDistroArchSeries(distroseries=self.parent)
- child = self._full_initialise(
+ child = self._fullInitialise(
+ [self.parent],
arches=[self.parent_das.architecturetag])
- self.assertDistroSeriesInitialisedCorrectly(child)
+ self.assertDistroSeriesInitialisedCorrectly(
+ child, self.parent, self.parent_das)
das = list(IStore(DistroArchSeries).find(
DistroArchSeries, distroseries=child))
self.assertEqual(len(das), 1)
@@ -204,6 +217,7 @@
def test_copying_packagesets(self):
# If a parent series has packagesets, we should copy them.
+ self.parent, self.parent_das = self.setupParent()
uploader = self.factory.makePerson()
test1 = getUtility(IPackagesetSet).new(
u'test1', u'test 1 packageset', self.parent.owner,
@@ -217,7 +231,7 @@
test1.addSources('udev')
getUtility(IArchivePermissionSet).newPackagesetUploader(
self.parent.main_archive, uploader, test1)
- child = self._full_initialise()
+ child = self._fullInitialise([self.parent])
# We can fetch the copied sets from the child.
child_test1 = getUtility(IPackagesetSet).getByName(
u'test1', distroseries=child)
@@ -255,10 +269,12 @@
def test_packageset_owner_preserved_within_distro(self):
# When initialising a new series within a distro, the copied
# packagesets have ownership preserved.
+ self.parent, self.parent_das = self.setupParent()
ps_owner = self.factory.makePerson()
getUtility(IPackagesetSet).new(
u'ps', u'packageset', ps_owner, distroseries=self.parent)
- child = self._full_initialise(distribution=self.parent.distribution)
+ child = self._fullInitialise(
+ [self.parent], distribution=self.parent.distribution)
child_ps = getUtility(IPackagesetSet).getByName(
u'ps', distroseries=child)
self.assertEqual(ps_owner, child_ps.owner)
@@ -266,10 +282,11 @@
def test_packageset_owner_not_preserved_cross_distro(self):
# In the case of a cross-distro initialisation, the new
# packagesets are owned by the new distro owner.
+ self.parent, self.parent_das = self.setupParent()
getUtility(IPackagesetSet).new(
u'ps', u'packageset', self.factory.makePerson(),
distroseries=self.parent)
- child = self._full_initialise()
+ child = self._fullInitialise([self.parent])
child_ps = getUtility(IPackagesetSet).getByName(
u'ps', distroseries=child)
self.assertEqual(child.owner, child_ps.owner)
@@ -277,6 +294,7 @@
def test_copy_limit_packagesets(self):
# If a parent series has packagesets, we can decide which ones we
# want to copy.
+ self.parent, self.parent_das = self.setupParent()
test1 = getUtility(IPackagesetSet).new(
u'test1', u'test 1 packageset', self.parent.owner,
distroseries=self.parent)
@@ -288,7 +306,8 @@
test1.addSources(pkg)
packageset1 = getUtility(IPackagesetSet).getByName(
u'test1', distroseries=self.parent)
- child = self._full_initialise(packagesets=(str(packageset1.id),))
+ child = self._fullInitialise(
+ [self.parent], packagesets=(str(packageset1.id),))
child_test1 = getUtility(IPackagesetSet).getByName(
u'test1', distroseries=child)
self.assertEqual(test1.description, child_test1.description)
@@ -301,12 +320,13 @@
self.assertEqual(parent_srcs, child_srcs)
child.updatePackageCount()
self.assertEqual(child.sourcecount, len(packages))
- self.assertEqual(child.binarycount, 2) # Chromium is FTBFS
+ self.assertEqual(child.binarycount, 2) # Chromium is FTBFS
def test_rebuild_flag(self):
# No binaries will get copied if we specify rebuild=True.
+ self.parent, self.parent_das = self.setupParent()
self.parent.updatePackageCount()
- child = self._full_initialise(rebuild=True)
+ child = self._fullInitialise([self.parent], rebuild=True)
child.updatePackageCount()
builds = child.getBuildRecords(
build_state=BuildStatus.NEEDSBUILD,
@@ -318,6 +338,7 @@
def test_limit_packagesets_rebuild_and_one_das(self):
# We can limit the source packages copied, and only builds
# for the copied source will be created.
+ self.parent, self.parent_das = self.setupParent()
test1 = getUtility(IPackagesetSet).new(
u'test1', u'test 1 packageset', self.parent.owner,
distroseries=self.parent)
@@ -328,7 +349,8 @@
for pkg in packages:
test1.addSources(pkg)
self.factory.makeDistroArchSeries(distroseries=self.parent)
- child = self._full_initialise(
+ child = self._fullInitialise(
+ [self.parent],
arches=[self.parent_das.architecturetag],
packagesets=(str(test1.id),), rebuild=True)
child.updatePackageCount()
@@ -346,10 +368,11 @@
def test_do_not_copy_disabled_dases(self):
# DASes that are disabled in the parent will not be copied.
+ self.parent, self.parent_das = self.setupParent()
ppc_das = self.factory.makeDistroArchSeries(
distroseries=self.parent)
ppc_das.enabled = False
- child = self._full_initialise()
+ child = self._fullInitialise([self.parent])
das = list(IStore(DistroArchSeries).find(
DistroArchSeries, distroseries=child))
self.assertEqual(len(das), 1)
@@ -358,6 +381,7 @@
def test_script(self):
# Do an end-to-end test using the command-line tool.
+ self.parent, self.parent_das = self.setupParent()
uploader = self.factory.makePerson()
test1 = getUtility(IPackagesetSet).new(
u'test1', u'test 1 packageset', self.parent.owner,
@@ -385,12 +409,14 @@
self.assertTrue(
"DEBUG Committing transaction." in stderr.split('\n'))
transaction.commit()
- self.assertDistroSeriesInitialisedCorrectly(child)
+ self.assertDistroSeriesInitialisedCorrectly(
+ child, self.parent, self.parent_das)
def test_is_initialized(self):
# At the end of the initialisation, the distroseriesparent is marked
# as 'initialised'.
- child = self._full_initialise(rebuild=True, overlays=())
+ self.parent, self.parent_das = self.setupParent()
+ child = self._fullInitialise([self.parent], rebuild=True, overlays=())
dsp_set = getUtility(IDistroSeriesParentSet)
distroseriesparent = dsp_set.getByDerivedAndParentSeries(
child, self.parent)
@@ -399,7 +425,8 @@
def test_no_overlays(self):
# Without the overlay parameter, no overlays are created.
- child = self._full_initialise(rebuild=True, overlays=[])
+ self.parent, self.parent_das = self.setupParent()
+ child = self._fullInitialise([self.parent], rebuild=True, overlays=[])
dsp_set = getUtility(IDistroSeriesParentSet)
distroseriesparent = dsp_set.getByDerivedAndParentSeries(
child, self.parent)
@@ -408,21 +435,167 @@
def test_setup_overlays(self):
# If the overlay parameter is passed, overlays are properly setup.
- child = self.factory.makeDistroSeries()
- overlays = [True]
- overlay_pockets = ['Updates']
- overlay_components = ['universe']
- child = self._full_initialise(
- child=child, rebuild=True, overlays=overlays,
+ self.parent1, unused = self.setupParent()
+ self.parent2, unused = self.setupParent()
+
+ overlays = [False, True]
+ overlay_pockets = [None, 'Updates']
+ overlay_components = [None, 'universe']
+ child = self._fullInitialise(
+ [self.parent1, self.parent2], rebuild=True,
+ overlays=overlays,
overlay_pockets=overlay_pockets,
overlay_components=overlay_components)
dsp_set = getUtility(IDistroSeriesParentSet)
- distroseriesparent = dsp_set.getByDerivedAndParentSeries(
- child, self.parent)
+ distroseriesparent1 = dsp_set.getByDerivedAndParentSeries(
+ child, self.parent1)
+ distroseriesparent2 = dsp_set.getByDerivedAndParentSeries(
+ child, self.parent2)
- self.assertTrue(distroseriesparent.is_overlay)
+ self.assertFalse(distroseriesparent1.is_overlay)
+ self.assertTrue(distroseriesparent2.is_overlay)
self.assertEqual(
getUtility(IComponentSet)['universe'],
- distroseriesparent.component)
+ distroseriesparent2.component)
self.assertEqual(
- PackagePublishingPocket.UPDATES, distroseriesparent.pocket)
+ PackagePublishingPocket.UPDATES, distroseriesparent2.pocket)
+
+ def test_multiple_parents_initialize(self):
+ self.parent, self.parent_das = self.setupParent()
+ self.parent2, self.parent_das2 = self.setupParent(
+ packages={'alpha': '0.1-1'})
+ child = self._fullInitialise([self.parent, self.parent2])
+ self.assertDistroSeriesInitialisedCorrectly(
+ child, self.parent, self.parent_das)
+
+ def test_multiple_parents_ordering(self):
+ # The parents' order is stored.
+ self.parent1, self.parent_das = self.setupParent()
+ self.parent2, self.parent_das2 = self.setupParent()
+ self.parent3, self.parent_das3 = self.setupParent()
+ child = self._fullInitialise(
+ [self.parent1, self.parent3, self.parent2])
+ dsp_set = getUtility(IDistroSeriesParentSet)
+ distroseriesparent1 = dsp_set.getByDerivedAndParentSeries(
+ child, self.parent1)
+ distroseriesparent2 = dsp_set.getByDerivedAndParentSeries(
+ child, self.parent2)
+ distroseriesparent3 = dsp_set.getByDerivedAndParentSeries(
+ child, self.parent3)
+
+ self.assertContentEqual(
+ [self.parent1, self.parent3, self.parent2],
+ child.getParentSeries())
+ self.assertEqual(1, distroseriesparent1.ordering)
+ self.assertEqual(3, distroseriesparent2.ordering)
+ self.assertEqual(2, distroseriesparent3.ordering)
+
+ def test_multiple_parent_packagesets_merge(self):
+ # Identical packagesets from the parents are merged as one
+ # packageset in the child.
+ self.parent1, self.parent_das1 = self.setupParent()
+ self.parent2, self.parent_das2 = self.setupParent()
+ uploader1 = self.factory.makePerson()
+ uploader2 = self.factory.makePerson()
+ test1_parent1 = getUtility(IPackagesetSet).new(
+ u'test1', u'test 1 packageset', self.parent1.owner,
+ distroseries=self.parent1)
+ test1_parent2 = getUtility(IPackagesetSet).new(
+ u'test1', u'test 1 packageset', self.parent2.owner,
+ distroseries=self.parent2)
+ test1_parent1.addSources('chromium')
+ test1_parent1.addSources('udev')
+ test1_parent2.addSources('udev')
+ test1_parent2.addSources('libc6')
+ getUtility(IArchivePermissionSet).newPackagesetUploader(
+ self.parent1.main_archive, uploader1, test1_parent1)
+ getUtility(IArchivePermissionSet).newPackagesetUploader(
+ self.parent2.main_archive, uploader2, test1_parent2)
+ child = self._fullInitialise([self.parent1, self.parent2])
+
+ # In the child, the identical packagesets are merged into one.
+ child_test1 = getUtility(IPackagesetSet).getByName(
+ u'test1', distroseries=child)
+ child_srcs = child_test1.getSourcesIncluded(
+ direct_inclusion=True)
+ parent1_srcs = test1_parent1.getSourcesIncluded(direct_inclusion=True)
+ parent2_srcs = test1_parent2.getSourcesIncluded(direct_inclusion=True)
+ self.assertContentEqual(
+ set(parent1_srcs).union(set(parent2_srcs)),
+ child_srcs)
+ # The uploaders can also upload to the new distroseries.
+ self.assertTrue(
+ getUtility(IArchivePermissionSet).isSourceUploadAllowed(
+ self.parent1.main_archive, 'udev', uploader1,
+ distroseries=self.parent1))
+ self.assertTrue(
+ getUtility(IArchivePermissionSet).isSourceUploadAllowed(
+ child.main_archive, 'udev', uploader1,
+ distroseries=child))
+ self.assertTrue(
+ getUtility(IArchivePermissionSet).isSourceUploadAllowed(
+ self.parent2.main_archive, 'libc6', uploader2,
+ distroseries=self.parent2))
+ self.assertTrue(
+ getUtility(IArchivePermissionSet).isSourceUploadAllowed(
+ child.main_archive, 'libc6', uploader2,
+ distroseries=child))
+
+ def test_multiple_parents_format_selection_union(self):
+ # The format selection for the derived series is the union of
+ # the format selections of the parents.
+ format1 = SourcePackageFormat.FORMAT_1_0
+ format2 = SourcePackageFormat.FORMAT_3_0_QUILT
+ self.parent1, notused = self.setupParent(format_selection=format1)
+ self.parent2, notused = self.setupParent(format_selection=format2)
+ child = self._fullInitialise([self.parent1, self.parent2])
+
+ self.assertTrue(child.isSourcePackageFormatPermitted(format1))
+ self.assertTrue(child.isSourcePackageFormatPermitted(format2))
+
+ def test_multiple_parents_component_merge(self):
+ # The components from the parents are merged to create the
+ # child's components.
+ self.comp1 = self.factory.makeComponent()
+ self.comp2 = self.factory.makeComponent()
+ self.parent1, notused = self.setupParent()
+ self.parent2, notused = self.setupParent()
+ ComponentSelection(distroseries=self.parent1, component=self.comp1)
+ ComponentSelection(distroseries=self.parent2, component=self.comp1)
+ ComponentSelection(distroseries=self.parent2, component=self.comp2)
+ child = self._fullInitialise([self.parent1, self.parent2])
+
+ self.assertContentEqual(
+ [self.comp1, self.comp2],
+ child.components)
+
+ def test_multiple_parents_section_merge(self):
+ # The sections from the parents are merged to create the child's
+ # sections.
+ self.section1 = self.factory.makeSection()
+ self.section2 = self.factory.makeSection()
+ self.parent1, notused = self.setupParent()
+ self.parent2, notused = self.setupParent()
+ SectionSelection(distroseries=self.parent1, section=self.section1)
+ SectionSelection(distroseries=self.parent2, section=self.section1)
+ SectionSelection(distroseries=self.parent2, section=self.section2)
+ child = self._fullInitialise([self.parent1, self.parent2])
+
+ self.assertContentEqual(
+ [self.section1, self.section2],
+ child.sections)
+
+ def test_multiple_parents_same_package(self):
+ # If the same package is published in different parents, the package
+ # in the first parents takes precedence.
+ self.parent1, self.parent_das1 = self.setupParent(
+ packages={'package': '0.3-1'})
+ self.parent2, self.parent_das2 = self.setupParent(
+ packages={'package': '0.1-1'})
+ child = self._fullInitialise([self.parent1, self.parent2])
+ published_sources = child.main_archive.getPublishedSources()
+
+ self.assertEquals(1, published_sources.count())
+ self.assertEquals(
+ u'0.3-1',
+ published_sources[0].sourcepackagerelease.version)
=== modified file 'lib/lp/soyuz/tests/test_packagecloner.py'
--- lib/lp/soyuz/tests/test_packagecloner.py 2010-10-04 19:50:45 +0000
+++ lib/lp/soyuz/tests/test_packagecloner.py 2011-06-10 10:56:52 +0000
@@ -58,21 +58,21 @@
(source.source_package_name, source.source_package_version))
self.assertEqual(expected_set, actual_set)
- def createSourceDistribution(self, package_infos):
+ def createSourceDistribution(self, package_infos, distro_name=None):
"""Create a distribution to be the source of a copy archive."""
- distroseries = self.createSourceDistroSeries()
+ distroseries = self.createSourceDistroSeries(distro_name=distro_name)
self.createSourcePublications(package_infos, distroseries)
return distroseries
- def createSourceDistroSeries(self):
+ def createSourceDistroSeries(self, distro_name="foobuntu",
+ distroseries_name="maudlin"):
"""Create a DistroSeries suitable for copying.
Creates a distroseries with a DistroArchSeries and nominatedarchindep,
- which makes it suitable for copying because it will create some builds.
+ which makes it suitable for copying because it will create some
+ builds.
"""
- distro_name = "foobuntu"
distro = self.factory.makeDistribution(name=distro_name)
- distroseries_name = "maudlin"
distroseries = self.factory.makeDistroSeries(
distribution=distro, name=distroseries_name)
das = self.factory.makeDistroArchSeries(
@@ -114,9 +114,10 @@
def makeCopyArchive(self, package_infos, component="main",
source_pocket=None, target_pocket=None,
- proc_families=None):
+ proc_families=None, distro_name=None):
"""Make a copy archive based on a new distribution."""
- distroseries = self.createSourceDistribution(package_infos)
+ distroseries = self.createSourceDistribution(
+ package_infos, distro_name)
copy_archive = self.getTargetArchive(distroseries.distribution)
to_component = getUtility(IComponentSet).ensure(component)
self.copyArchive(
@@ -145,7 +146,8 @@
def copyArchive(self, to_archive, to_distroseries, from_archive=None,
from_distroseries=None, from_pocket=None, to_pocket=None,
- to_component=None, packagesets=None, proc_families=None):
+ to_component=None, packagesets=None, proc_families=None,
+ no_duplicates=False):
"""Use a PackageCloner to copy an archive."""
if from_distroseries is None:
from_distroseries = to_distroseries
@@ -169,7 +171,7 @@
cloner = getUtility(IPackageCloner)
cloner.clonePackages(
origin, destination, distroarchseries_list=None,
- proc_families=proc_families)
+ proc_families=proc_families, no_duplicates=no_duplicates)
return cloner
def testCopiesPublished(self):
@@ -398,7 +400,6 @@
copy_archive, distroseries, proc_families=proc_families)
self.checkBuilds(copy_archive, [package_info, package_info])
-
def diffArchives(self, target_archive, target_distroseries,
source_archive=None, source_distroseries=None):
"""Run a packageSetDiff of two archives."""
@@ -422,6 +423,7 @@
expected_changed_tuples = [(e.name, e.version)
for e in expected_changed]
expected_new_tuples = [(e.name, e.version) for e in expected_new]
+
def get_tuples(source_keys):
tuples = []
for source_key in source_keys:
@@ -434,6 +436,7 @@
(naked_source.source_package_name,
naked_source.source_package_version))
return tuples
+
actual_changed_tuples = get_tuples(actual_changed_keys)
actual_new_tuples = get_tuples(actual_new_keys)
self.assertEqual(expected_changed_tuples, actual_changed_tuples)
@@ -502,7 +505,6 @@
[package_infos[0]], [package_infos[1]], diff,
distroseries.distribution.main_archive)
-
def mergeCopy(self, target_archive, target_distroseries,
source_archive=None, source_distroseries=None):
if source_distroseries is None:
@@ -660,3 +662,62 @@
self.checkBuilds(
copy_archive,
package_infos + package_infos + package_infos2 + package_infos2)
+
+ def testCopyNoDuplicates(self):
+ # No duplicate SourcePackagePublishingHistory is created by the
+ # packagecloner if no_duplicates=True is passed to
+ # cloner.clonePackages.
+ package_infos = [
+ PackageInfo(
+ "bzr", "2.1", status=PackagePublishingStatus.PUBLISHED),
+ PackageInfo(
+ "bzr", "2.2", status=PackagePublishingStatus.PUBLISHED),
+ PackageInfo(
+ "bzr", "2.3", status=PackagePublishingStatus.PUBLISHED)]
+ proc_families = [ProcessorFamilySet().getByName("x86")]
+ # Create bzr 2.1 (superseeded) and 2.2 (published) in archive1.
+ archive1, distroseries1 = self.makeCopyArchive(
+ package_infos[:2], proc_families=proc_families)
+ self.checkCopiedSources(
+ archive1, distroseries1, package_infos[1:2])
+ self.checkBuilds(archive1, package_infos[1:2])
+
+ # Create bzr 2.2 (superseeded) and 2.3 (published) in archive2.
+ archive2, distroseries2 = self.makeCopyArchive(
+ package_infos, proc_families=proc_families,
+ distro_name="foo2")
+ self.checkCopiedSources(
+ archive2, distroseries2, package_infos[2:])
+ self.checkBuilds(archive2, package_infos[2:])
+
+ # Copy into a new archive from the two archives we just created.
+ dest_series = self.createSourceDistroSeries(distro_name="foo3")
+ dest_archive = self.getTargetArchive(dest_series.distribution)
+ self.copyArchive(
+ dest_archive, dest_series, from_archive=archive1,
+ from_distroseries=distroseries1, proc_families=proc_families,
+ no_duplicates=True)
+ self.copyArchive(
+ dest_archive, dest_series, from_archive=archive2,
+ from_distroseries=distroseries2, proc_families=proc_families,
+ no_duplicates=True)
+
+ # Fetch published sources and builds in the new archive.
+ published_sources = dest_archive.getPublishedSources(
+ name='bzr')
+ naked_published_sources = removeSecurityProxy(published_sources)
+ binarypackagebuild_set = getUtility(IBinaryPackageBuildSet)
+ builds = binarypackagebuild_set.getBuildsForArchive(
+ dest_archive, status=BuildStatus.NEEDSBUILD)
+ naked_builds = removeSecurityProxy(builds)
+
+ # Since we passed no_duplicates=True to the packagecloner, the
+ # second copy has not created another SourcePackagePublishingHistory
+ # for bzr.
+ self.assertEquals(1, naked_published_sources.count())
+ self.assertEquals(
+ u"2.2",
+ naked_published_sources[0].sourcepackagerelease.version)
+ # Only *missing builds* are created by the packagecloner so only
+ # the first copy creates a build.
+ self.assertEquals(1, naked_builds.count())