launchpad-reviewers team mailing list archive
Message #00371
[Merge] lp:~stevenk/launchpad/move-ifp-from-idistroseries into lp:launchpad/devel
Steve Kowalik has proposed merging lp:~stevenk/launchpad/move-ifp-from-idistroseries into lp:launchpad/devel.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
This branch moves initialiseFromParent out of IDistroSeries and DistroSeries, relocating the logic to a new InitialiseDistroSeries class in lib/lp/soyuz/scripts/initialise_distroseries.py and leaving XXX markers at the remaining call sites.
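
For reviewers, a minimal sketch of how the relocated logic is driven. This is illustrative only: 'child_series' is an assumed, freshly created DistroSeries with parent_series already set, and the class and exceptions are the ones added in lib/lp/soyuz/scripts/initialise_distroseries.py in the diff below.

    from lp.soyuz.scripts.initialise_distroseries import InitialiseDistroSeries

    # Constructing the object runs the pre-flight checks; instead of the old
    # bare asserts they raise ParentSeriesRequired, PendingBuilds,
    # QueueNotEmpty or SeriesAlreadyInUse.
    initialiser = InitialiseDistroSeries(child_series)
    # initialise() then copies the component/section/format selections,
    # publishing records, lucille config and packaging links from the parent.
    initialiser.initialise()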
--
https://code.launchpad.net/~stevenk/launchpad/move-ifp-from-idistroseries/+merge/31520
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stevenk/launchpad/move-ifp-from-idistroseries into lp:launchpad/devel.
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-07-21 14:14:54 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-02 08:56:03 +0000
@@ -192,6 +192,7 @@
breezy_i386.addOrUpdateChroot(fake_chroot)
self.breezy.changeslist = 'breezy-changes@xxxxxxxxxx'
+ # XXX StevenK Use of initialiseFromParent
self.breezy.initialiseFromParent()
if permitted_formats is None:
=== modified file 'lib/lp/registry/doc/distroseries.txt'
--- lib/lp/registry/doc/distroseries.txt 2010-07-28 02:02:25 +0000
+++ lib/lp/registry/doc/distroseries.txt 2010-08-02 08:56:03 +0000
@@ -309,6 +309,8 @@
publishing records etc. Essentially this is a "Do not push this button
again" type set of assertions.
+XXX StevenK Use of initialiseFromParent
+
>>> login("foo.bar@xxxxxxxxxxxxx")
>>> humpy = ubuntu.newSeries('humpy', 'Humpy Hippo',
... 'The Humpy Hippo', 'Fat', 'Yo Momma',
=== modified file 'lib/lp/registry/interfaces/distroseries.py'
--- lib/lp/registry/interfaces/distroseries.py 2010-07-28 02:02:25 +0000
+++ lib/lp/registry/interfaces/distroseries.py 2010-08-02 08:56:03 +0000
@@ -727,35 +727,6 @@
supports_virtualized=False):
"""Create a new port or DistroArchSeries for this DistroSeries."""
- def initialiseFromParent():
- """Copy in all of the parent distroseries's configuration. This
- includes all configuration for distroseries and distroarchseries
- publishing and all publishing records for sources and binaries.
-
- Preconditions:
- The distroseries must have been set up with its distroarchseriess
- as needed. It should have its nominated arch-indep set up along
- with all other basic requirements for the structure of the
- distroseries. This distroseries and all its distroarchseriess
- must have empty publishing sets. Section and component selections
- must be empty.
-
- Outcome:
- The publishing structure will be copied from the parent. All
- PUBLISHED and PENDING packages in the parent will be created in
- this distroseries and its distroarchseriess. The lucille config
- will be copied in, all component and section selections will be
- duplicated as will any permission-related structures.
-
- Note:
- This method will assert all of its preconditions where possible.
- After this is run, you still need to construct chroots for building,
- you need to add anything missing wrt. ports etc. This method is
- only meant to give you a basic copy of a parent series in order
- to assist you in preparing a new series of a distribution or
- in the initialisation of a derivative.
- """
-
def copyTranslationsFromParent(ztm):
"""Copy any translation done in parent that we lack.
=== modified file 'lib/lp/registry/model/distroseries.py'
--- lib/lp/registry/model/distroseries.py 2010-07-29 15:16:43 +0000
+++ lib/lp/registry/model/distroseries.py 2010-08-02 08:56:03 +0000
@@ -1590,175 +1590,6 @@
"""See BugTargetBase."""
return 'BugTask.distroseries = %s' % sqlvalues(self)
- def initialiseFromParent(self):
- """See `IDistroSeries`."""
- archives = self.distribution.all_distro_archive_ids
- assert self.parent_series is not None, "Parent series must be present"
- assert SourcePackagePublishingHistory.select("""
- Distroseries = %s AND
- Archive IN %s""" % sqlvalues(self.id, archives)).count() == 0, (
- "Source Publishing must be empty")
- for arch in self.architectures:
- assert BinaryPackagePublishingHistory.select("""
- DistroArchSeries = %s AND
- Archive IN %s""" % sqlvalues(arch, archives)).count() == 0, (
- "Binary Publishing must be empty")
- try:
- parent_arch = self.parent_series[arch.architecturetag]
- assert parent_arch.processorfamily == arch.processorfamily, (
- "The arch tags must match the processor families.")
- except KeyError:
- raise AssertionError("Parent series lacks %s" % (
- arch.architecturetag))
- assert self.nominatedarchindep is not None, (
- "Must have a nominated archindep architecture.")
- assert self.components.count() == 0, (
- "Component selections must be empty.")
- assert self.sections.count() == 0, (
- "Section selections must be empty.")
-
- # MAINTAINER: dsilvers: 20051031
- # Here we go underneath the SQLObject caching layers in order to
- # generate what will potentially be tens of thousands of rows
- # in various tables. Thus we flush pending updates from the SQLObject
- # layer, perform our work directly in the transaction and then throw
- # the rest of the SQLObject cache away to make sure it hasn't cached
- # anything that is no longer true.
-
- # Prepare for everything by flushing updates to the database.
- flush_database_updates()
- cur = cursor()
-
- # Perform the copies
- self._copy_component_section_and_format_selections(cur)
-
- # Prepare the list of distroarchseries for which binary packages
- # shall be copied.
- distroarchseries_list = []
- for arch in self.architectures:
- parent_arch = self.parent_series[arch.architecturetag]
- distroarchseries_list.append((parent_arch, arch))
- # Now copy source and binary packages.
- self._copy_publishing_records(distroarchseries_list)
- self._copy_lucille_config(cur)
- self._copy_packaging_links(cur)
-
- # Finally, flush the caches because we've altered stuff behind the
- # back of sqlobject.
- flush_database_caches()
-
- def _copy_lucille_config(self, cur):
- """Copy all lucille related configuration from our parent series."""
- cur.execute('''
- UPDATE DistroSeries SET lucilleconfig=(
- SELECT pdr.lucilleconfig FROM DistroSeries AS pdr
- WHERE pdr.id = %s)
- WHERE id = %s
- ''' % sqlvalues(self.parent_series.id, self.id))
-
- def _copy_publishing_records(self, distroarchseries_list):
- """Copy the publishing records from the parent arch series
- to the given arch series in ourselves.
-
- We copy all PENDING and PUBLISHED records as PENDING into our own
- publishing records.
-
- We copy only the RELEASE pocket in the PRIMARY and PARTNER
- archives.
- """
- archive_set = getUtility(IArchiveSet)
-
- for archive in self.parent_series.distribution.all_distro_archives:
- # We only want to copy PRIMARY and PARTNER archives.
- if archive.purpose not in MAIN_ARCHIVE_PURPOSES:
- continue
-
- # XXX cprov 20080612: Implicitly creating a PARTNER archive for
- # the destination distroseries is bad. Why are we copying
- # partner to a series in another distribution anyway ?
- # See bug #239807 for further information.
- target_archive = archive_set.getByDistroPurpose(
- self.distribution, archive.purpose)
- if target_archive is None:
- target_archive = archive_set.new(
- distribution=self.distribution, purpose=archive.purpose,
- owner=self.distribution.owner)
-
- origin = PackageLocation(
- archive, self.parent_series.distribution, self.parent_series,
- PackagePublishingPocket.RELEASE)
- destination = PackageLocation(
- target_archive, self.distribution, self,
- PackagePublishingPocket.RELEASE)
- clone_packages(origin, destination, distroarchseries_list)
-
- def _copy_component_section_and_format_selections(self, cur):
- """Copy the section, component and format selections from the parent
- distro series into this one.
- """
- # Copy the component selections
- cur.execute('''
- INSERT INTO ComponentSelection (distroseries, component)
- SELECT %s AS distroseries, cs.component AS component
- FROM ComponentSelection AS cs WHERE cs.distroseries = %s
- ''' % sqlvalues(self.id, self.parent_series.id))
- # Copy the section selections
- cur.execute('''
- INSERT INTO SectionSelection (distroseries, section)
- SELECT %s as distroseries, ss.section AS section
- FROM SectionSelection AS ss WHERE ss.distroseries = %s
- ''' % sqlvalues(self.id, self.parent_series.id))
- # Copy the source format selections
- cur.execute('''
- INSERT INTO SourcePackageFormatSelection (distroseries, format)
- SELECT %s as distroseries, spfs.format AS format
- FROM SourcePackageFormatSelection AS spfs
- WHERE spfs.distroseries = %s
- ''' % sqlvalues(self.id, self.parent_series.id))
-
- def _copy_packaging_links(self, cur):
- """Copy the packaging links from the parent series to this one."""
- cur.execute("""
- INSERT INTO
- Packaging(
- distroseries, sourcepackagename, productseries,
- packaging, owner)
- SELECT
- ChildSeries.id,
- Packaging.sourcepackagename,
- Packaging.productseries,
- Packaging.packaging,
- Packaging.owner
- FROM
- Packaging
- -- Joining the parent distroseries permits the query to build
- -- the data set for the series being updated, yet results are
- -- in fact the data from the original series.
- JOIN Distroseries ChildSeries
- ON Packaging.distroseries = ChildSeries.parent_series
- WHERE
- -- Select only the packaging links that are in the parent
- -- that are not in the child.
- ChildSeries.id = %s
- AND Packaging.sourcepackagename in (
- SELECT sourcepackagename
- FROM Packaging
- WHERE distroseries in (
- SELECT id
- FROM Distroseries
- WHERE id = ChildSeries.parent_series
- )
- EXCEPT
- SELECT sourcepackagename
- FROM Packaging
- WHERE distroseries in (
- SELECT id
- FROM Distroseries
- WHERE id = ChildSeries.id
- )
- )
- """ % self.id)
-
def copyTranslationsFromParent(self, transaction, logger=None):
"""See `IDistroSeries`."""
if logger is None:
=== removed file 'lib/lp/soyuz/doc/initialise-from-parent.txt'
--- lib/lp/soyuz/doc/initialise-from-parent.txt 2010-05-14 06:14:12 +0000
+++ lib/lp/soyuz/doc/initialise-from-parent.txt 1970-01-01 00:00:00 +0000
@@ -1,194 +0,0 @@
-Check the behaviour of the initialise_from_parent script. It basically
-calls IDistroSeries.initialiseFromParent method with experimental extra
-checks and tasks.
-
-We need to create an initialisable DistroSeries as a child of Ubuntu
-Hoary (we do it inside the ubuntutest distribution to avoid conflicts
-with other tests)
-
- >>> from canonical.launchpad.interfaces import IDistributionSet
- >>> from canonical.launchpad.ftests import login
-
- >>> login("foo.bar@xxxxxxxxxxxxx")
- >>> distribution_set = getUtility(IDistributionSet)
- >>> ubuntutest = distribution_set['ubuntutest']
- >>> ubuntu = distribution_set['ubuntu']
- >>> hoary = ubuntu['hoary']
-
- # XXX cprov 2006-05-29 bug=49133:
- # New distroseries should be provided by IDistribution.
- # This maybe affected by derivation design and is documented in bug.
-
- >>> foobuntu = ubuntutest.newSeries('foobuntu', 'FooBuntu',
- ... 'The Foobuntu', 'yeck', 'doom',
- ... '888', hoary, hoary.owner)
-
-
-The script will check that there are no NEEDSBUILD builds in the parent
-distroseries' release pocket, so we need to tweak the status of the NEEDSBUILD
-builds that exist in Ubuntu Hoary in the sampledata:
-
- >>> from lp.buildmaster.interfaces.buildbase import BuildStatus
- >>> from lp.registry.interfaces.pocket import PackagePublishingPocket
-
- >>> pending_builds = hoary.getBuildRecords(BuildStatus.NEEDSBUILD,
- ... pocket=PackagePublishingPocket.RELEASE)
- >>> for build in pending_builds:
- ... build.status = BuildStatus.FAILEDTOBUILD
-
- >>> import transaction
- >>> transaction.commit()
-
-
- >>> import subprocess
- >>> import os
- >>> import sys
- >>> from canonical.config import config
-
-
-Check if it fails for an already released distroseries:
-
- >>> script = os.path.join(config.root, "scripts", "ftpmaster-tools",
- ... "initialise-from-parent.py")
- >>> process = subprocess.Popen([sys.executable, script, "-vv",
- ... "breezy-autotest"],
- ... stdout=subprocess.PIPE,
- ... stderr=subprocess.PIPE,)
- >>> stdout, stderr = process.communicate()
- >>> process.returncode
- 1
- >>> print stderr
- DEBUG Acquiring lock
- DEBUG Initialising connection.
- DEBUG Check empty mutable queues in parentseries
- DEBUG Check for no pending builds in parentseries
- DEBUG Copying distroarchseries from parent and setting nominatedarchindep.
- Traceback (most recent call last):
- ...
- AssertionError: Can not copy distroarchseries from parent, there are already distroarchseries(s) initialised for this series.
- <BLANKLINE>
-
-
-Let's initialise the just created distroseries:
-
- >>> process = subprocess.Popen([sys.executable, script, "-vv",
- ... "-d", "ubuntutest", "foobuntu"],
- ... stdout=subprocess.PIPE,
- ... stderr=subprocess.PIPE,)
- >>> stdout, stderr = process.communicate()
- >>> process.returncode
- 0
- >>> print stderr
- DEBUG Acquiring lock
- DEBUG Initialising connection.
- DEBUG Check empty mutable queues in parentseries
- DEBUG Check for no pending builds in parentseries
- DEBUG Copying distroarchseries from parent and setting nominatedarchindep.
- DEBUG initialising from parent, copying publishing records.
- DEBUG Committing transaction.
- DEBUG Releasing lock
- <BLANKLINE>
-
-
-Checking the published sources and binaries of ubuntutest/foobuntu
-against its parent, ubuntu/hoary:
-
- >>> hoary_pmount_pubs = hoary.getPublishedReleases('pmount')
- >>> foobuntu_pmount_pubs = foobuntu.getPublishedReleases('pmount')
- >>> len(foobuntu_pmount_pubs) == len(hoary_pmount_pubs)
- True
-
- >>> hoary_i386_pmount_pubs = hoary['i386'].getReleasedPackages('pmount')
- >>> foobuntu_i386_pmount_pubs = (
- ... foobuntu['i386'].getReleasedPackages('pmount'))
- >>> len(foobuntu_i386_pmount_pubs) == len(hoary_i386_pmount_pubs)
- True
-
-Check how the publication records behave in a just-initialise distroseries.
-First we get a binarypackagerelease published in foobuntu:
-
- >>> pmount_binrel = (
- ... foobuntu['i386'].getReleasedPackages(
- ... 'pmount')[0].binarypackagerelease)
- >>> pmount_binrel.title
- u'pmount-0.1-1'
-
-Follow BPR.build and discover it was built in the parent series:
-
- >>> pmount_binrel.build.id
- 7
- >>> pmount_binrel.build.title
- u'i386 build of pmount 0.1-1 in ubuntu hoary RELEASE'
-
-Now we obtain the sourcepackagerelease from the build:
-
- >>> pmount_srcrel = pmount_binrel.build.source_package_release
- >>> pmount_srcrel.title
- u'pmount - 0.1-1'
-
-and check it the ISPR.getBuildByArch() would find out the same build
-record for foobuntu and it's parent series (hoary):
-
- >>> foobuntu_pmount = pmount_srcrel.getBuildByArch(
- ... foobuntu['i386'], foobuntu.main_archive)
- >>> hoary_pmount = pmount_srcrel.getBuildByArch(
- ... hoary['i386'], hoary.main_archive)
-
- >>> foobuntu_pmount.id == hoary_pmount.id
- True
-
-It means that queuebuilder doesn't need to create a new build record
-in for pmount_0.1-1 in foobuntu.
-
-In the other hand there is a newer source for pmount published in
-hoary and consequently in foobuntu:
-
-Note: This is a very unlikely situation, since ubuntu/hoary was marked
-as RELEASED before build pmount_0.1-2 in the sampledata. So when we
-try initialise a distroseries in another distribution based on hoary,
-since they have independent archives (pool), pmount_0.1-1 binary
-becomes a NBS (not build from source) since the pmount_0.1-1 source
-was superseded in hoary and won't be inherited by the initialised
-distroseries.
-
- >>> pmount_source = hoary.getSourcePackage('pmount').currentrelease
- >>> print pmount_source.title
- "pmount" 0.1-2 source package in The Hoary Hedgehog Release
-
- >>> pmount_source = foobuntu.getSourcePackage('pmount').currentrelease
- >>> print pmount_source.title
- "pmount" 0.1-2 source package in The Foobuntu
-
-
-Since pmount_0.1-2 source is published we can safely look up for the
-respective build record:
-
- >>> pmount_source.sourcepackagerelease.getBuildByArch(
- ... foobuntu['i386'], ubuntu.main_archive) is None
- True
-
-It's not present, Let's create it to check if getBuildByArch responds
-appropriately (we won't care about the source architecturehintlist in
-this test, see more details in buildd-queuebuilder)
-
- >>> from lp.registry.interfaces.pocket import PackagePublishingPocket
- >>> created_build = pmount_source.sourcepackagerelease.createBuild(
- ... foobuntu['i386'], PackagePublishingPocket.RELEASE,
- ... ubuntu.main_archive)
-
- >>> retrieved_build = pmount_source.sourcepackagerelease.getBuildByArch(
- ... foobuntu['i386'], ubuntu.main_archive)
-
- >>> retrieved_build.id == created_build.id
- True
-
- >>> pmount_source.sourcepackagerelease.getBuildByArch(
- ... foobuntu['hppa'], ubuntu.main_archive) is None
- True
-
-initialiseFromParent also copies the permitted source formats from the
-parent series.
-
- >>> from lp.soyuz.interfaces.sourcepackageformat import SourcePackageFormat
- >>> foobuntu.isSourcePackageFormatPermitted(SourcePackageFormat.FORMAT_1_0)
- True
=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-07-12 13:06:41 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-08-02 08:56:03 +0000
@@ -69,6 +69,8 @@
Having set up that infrastructure we need to prepare a breezy distroseries
for the ubuntutest distribution.
+XXX StevenK Use of initialiseFromParent
+
>>> from lp.registry.model.distribution import Distribution
>>> from canonical.launchpad.database import LibraryFileAlias
>>> ubuntu = Distribution.byName('ubuntu')
=== added file 'lib/lp/soyuz/scripts/initialise_distroseries.py'
--- lib/lp/soyuz/scripts/initialise_distroseries.py 1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/initialise_distroseries.py 2010-08-02 08:56:03 +0000
@@ -0,0 +1,269 @@
+# Copyright 2009 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Initialise a distroseries from its parent distroseries."""
+
+
+__metaclass__ = type
+__all__ = [
+ 'InitialiseDistroSeries',
+ 'ParentSeriesRequired',
+ 'PendingBuilds',
+ 'QueueNotEmpty',
+ 'SeriesAlreadyInUse',
+ ]
+
+from canonical.database.sqlbase import (
+ cursor, flush_database_caches, flush_database_updates, quote_like,
+ quote, SQLBase, sqlvalues)
+
+from lp.buildmaster.interfaces.buildbase import BuildStatus
+from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.soyuz.interfaces.queue import PackageUploadStatus
+from lp.soyuz.model.publishing import (
+ BinaryPackagePublishingHistory, SourcePackagePublishingHistory)
+
+
+class PendingBuilds(Exception):
+ """Raised when the parent distroseries has pending builds."""
+
+
+class QueueNotEmpty(Exception):
+ """Raised when the parent distroseries has items in its queues."""
+
+
+class ParentSeriesRequired(Exception):
+ """Raised when the distroseries does not have a parent series set."""
+
+
+class SeriesAlreadyInUse(Exception):
+ """Raised when the distroseries already contains things."""
+
+
+class InitialiseDistroSeries:
+ def __init__(self, distroseries):
+ self.distroseries = distroseries
+ self._check()
+
+ def _check(self):
+ if self.distroseries.parent_series is None:
+ raise ParentSeriesRequired
+ self._checkBuilds()
+ self._checkQueue()
+ self._checkSeries()
+
+ def _checkBuilds(self):
+ """Assert there are no pending builds for parent series.
+
+ Only cares about the RELEASE pocket, which is the only one inherited
+ via initialiseFromParent method.
+ """
+ parentseries = self.distroseries.parent_series
+
+ # only the RELEASE pocket is inherited, so we only check
+ # pending build records for it.
+ pending_builds = parentseries.getBuildRecords(
+ BuildStatus.NEEDSBUILD, pocket=PackagePublishingPocket.RELEASE)
+
+ if pending_builds.count():
+ raise PendingBuilds
+
+ def _checkQueue(self):
+ """Assert upload queue is empty on parent series.
+
+ Only cares about the RELEASE pocket, which is the only one inherited
+ via initialiseFromParent method.
+ """
+ parentseries = self.distroseries.parent_series
+
+ # only the RELEASE pocket is inherited, so we only check
+ # queue items for it.
+ for queue in (
+ PackageUploadStatus.NEW, PackageUploadStatus.ACCEPTED,
+ PackageUploadStatus.UNAPPROVED):
+ items = parentseries.getQueueItems(
+ queue, pocket=PackagePublishingPocket.RELEASE)
+ if items:
+ raise QueueNotEmpty
+
+ def _checkSeries(self):
+ sources = self.distroseries.getAllPublishedSources()
+ if sources.count():
+ raise SeriesAlreadyInUse
+ binaries = self.distroseries.getAllPublishedBinaries()
+ if binaries.count():
+ raise SeriesAlreadyInUse
+ if self.distroseries.architectures.count():
+ raise SeriesAlreadyInUse
+ if self.distroseries.components.count():
+ raise SeriesAlreadyInUse
+ if self.distroseries.sections.count():
+ raise SeriesAlreadyInUse
+
+ def initialise(self):
+ """See `IDistroSeries`."""
+ # MAINTAINER: dsilvers: 20051031
+ # Here we go underneath the SQLObject caching layers in order to
+ # generate what will potentially be tens of thousands of rows
+ # in various tables. Thus we flush pending updates from the SQLObject
+ # layer, perform our work directly in the transaction and then throw
+ # the rest of the SQLObject cache away to make sure it hasn't cached
+ # anything that is no longer true.
+
+ # Prepare for everything by flushing updates to the database.
+ flush_database_updates()
+ cur = cursor()
+
+ # Perform the copies
+ self._copy_component_section_and_format_selections(cur)
+
+ # Prepare the list of distroarchseries for which binary packages
+ # shall be copied.
+ distroarchseries_list = []
+ for arch in self.architectures:
+ parent_arch = self.parent_series[arch.architecturetag]
+ distroarchseries_list.append((parent_arch, arch))
+ # Now copy source and binary packages.
+ self._copy_publishing_records(distroarchseries_list)
+ self._copy_lucille_config(cur)
+ self._copy_packaging_links(cur)
+
+ # Finally, flush the caches because we've altered stuff behind the
+ # back of sqlobject.
+ flush_database_caches()
+
+ def _copy_lucille_config(self, cur):
+ """Copy all lucille related configuration from our parent series."""
+ cur.execute('''
+ UPDATE DistroSeries SET lucilleconfig=(
+ SELECT pdr.lucilleconfig FROM DistroSeries AS pdr
+ WHERE pdr.id = %s)
+ WHERE id = %s
+ ''' % sqlvalues(self.parent_series.id, self.id))
+
+ def _copy_publishing_records(self, distroarchseries_list):
+ """Copy the publishing records from the parent arch series
+ to the given arch series in ourselves.
+
+ We copy all PENDING and PUBLISHED records as PENDING into our own
+ publishing records.
+
+ We copy only the RELEASE pocket in the PRIMARY and PARTNER
+ archives.
+ """
+ archive_set = getUtility(IArchiveSet)
+
+ for archive in self.parent_series.distribution.all_distro_archives:
+ # We only want to copy PRIMARY and PARTNER archives.
+ if archive.purpose not in MAIN_ARCHIVE_PURPOSES:
+ continue
+
+ # XXX cprov 20080612: Implicitly creating a PARTNER archive for
+ # the destination distroseries is bad. Why are we copying
+ # partner to a series in another distribution anyway ?
+ # See bug #239807 for further information.
+ target_archive = archive_set.getByDistroPurpose(
+ self.distribution, archive.purpose)
+ if target_archive is None:
+ target_archive = archive_set.new(
+ distribution=self.distribution, purpose=archive.purpose,
+ owner=self.distribution.owner)
+
+ origin = PackageLocation(
+ archive, self.parent_series.distribution, self.parent_series,
+ PackagePublishingPocket.RELEASE)
+ destination = PackageLocation(
+ target_archive, self.distribution, self,
+ PackagePublishingPocket.RELEASE)
+ clone_packages(origin, destination, distroarchseries_list)
+
+ def _copy_component_section_and_format_selections(self, cur):
+ """Copy the section, component and format selections from the parent
+ distro series into this one.
+ """
+ # Copy the component selections
+ cur.execute('''
+ INSERT INTO ComponentSelection (distroseries, component)
+ SELECT %s AS distroseries, cs.component AS component
+ FROM ComponentSelection AS cs WHERE cs.distroseries = %s
+ ''' % sqlvalues(self.id, self.parent_series.id))
+ # Copy the section selections
+ cur.execute('''
+ INSERT INTO SectionSelection (distroseries, section)
+ SELECT %s as distroseries, ss.section AS section
+ FROM SectionSelection AS ss WHERE ss.distroseries = %s
+ ''' % sqlvalues(self.id, self.parent_series.id))
+ # Copy the source format selections
+ cur.execute('''
+ INSERT INTO SourcePackageFormatSelection (distroseries, format)
+ SELECT %s as distroseries, spfs.format AS format
+ FROM SourcePackageFormatSelection AS spfs
+ WHERE spfs.distroseries = %s
+ ''' % sqlvalues(self.id, self.parent_series.id))
+
+ def _copy_packaging_links(self, cur):
+ """Copy the packaging links from the parent series to this one."""
+ cur.execute("""
+ INSERT INTO
+ Packaging(
+ distroseries, sourcepackagename, productseries,
+ packaging, owner)
+ SELECT
+ ChildSeries.id,
+ Packaging.sourcepackagename,
+ Packaging.productseries,
+ Packaging.packaging,
+ Packaging.owner
+ FROM
+ Packaging
+ -- Joining the parent distroseries permits the query to build
+ -- the data set for the series being updated, yet results are
+ -- in fact the data from the original series.
+ JOIN Distroseries ChildSeries
+ ON Packaging.distroseries = ChildSeries.parent_series
+ WHERE
+ -- Select only the packaging links that are in the parent
+ -- that are not in the child.
+ ChildSeries.id = %s
+ AND Packaging.sourcepackagename in (
+ SELECT sourcepackagename
+ FROM Packaging
+ WHERE distroseries in (
+ SELECT id
+ FROM Distroseries
+ WHERE id = ChildSeries.parent_series
+ )
+ EXCEPT
+ SELECT sourcepackagename
+ FROM Packaging
+ WHERE distroseries in (
+ SELECT id
+ FROM Distroseries
+ WHERE id = ChildSeries.id
+ )
+ )
+ """ % self.id)
+
+def copy_architectures(distroseries):
+ """Overlap SQLObject and copy architecture from the parent.
+
+ Also set the nominatedarchindep properly in target.
+ """
+ assert distroseries.architectures.count() is 0, (
+ "Can not copy distroarchseries from parent, there are already "
+ "distroarchseries(s) initialised for this series.")
+ flush_database_updates()
+ cur = cursor()
+ cur.execute("""
+ INSERT INTO DistroArchSeries
+ (distroseries, processorfamily, architecturetag, owner, official)
+ SELECT %s, processorfamily, architecturetag, %s, official
+ FROM DistroArchSeries WHERE distroseries = %s
+ """ % sqlvalues(distroseries, distroseries.owner,
+ distroseries.parent_series))
+ flush_database_caches()
+
+ distroseries.nominatedarchindep = distroseries[
+ distroseries.parent_series.nominatedarchindep.architecturetag]
+
+
=== added file 'lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py'
--- lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py 1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py 2010-08-02 08:56:03 +0000
@@ -0,0 +1,59 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test the initialise_distroseries script machinery."""
+
+__metaclass__ = type
+
+from zope.component import getUtility
+
+from lp.buildmaster.interfaces.buildbase import BuildStatus
+from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.soyuz.scripts.initialise_distroseries import (
+ InitialiseDistroSeries, ParentSeriesRequired, SeriesAlreadyInUse)
+from lp.testing import TestCaseWithFactory
+
+from canonical.launchpad.interfaces import IDistributionSet
+from canonical.launchpad.ftests import login, logout
+from canonical.testing.layers import LaunchpadZopelessLayer
+
+
+class TestInitialiseDistroSeries(TestCaseWithFactory):
+
+ layer = LaunchpadZopelessLayer
+
+ def setUp(self):
+ super(TestInitialiseDistroSeries, self).setUp()
+ login("foo.bar@xxxxxxxxxxxxx")
+ distribution_set = getUtility(IDistributionSet)
+ self.ubuntutest = distribution_set['ubuntutest']
+ ubuntu = distribution_set['ubuntu']
+ self.hoary = ubuntu['hoary']
+ # XXX cprov 2006-05-29 bug=49133:
+ # New distroseries should be provided by IDistribution.
+ # This maybe affected by derivation design and is documented in bug.
+ self.foobuntu = self.ubuntutest.newSeries(
+ 'foobuntu', 'FooBuntu', 'The Foobuntu', 'yeck', 'doom',
+ '888', None, self.hoary.owner)
+ logout()
+
+ def test_failure_with_no_parent_series(self):
+ self.assertRaises(
+ ParentSeriesRequired, InitialiseDistroSeries, self.foobuntu)
+
+ def test_failure_for_already_released_distroseries(self):
+ login("foo.bar@xxxxxxxxxxxxx")
+ breezy_autotest = self.ubuntutest['breezy-autotest']
+ self.assertRaises(
+ SeriesAlreadyInUse, InitialiseDistroSeries, breezy_autotest)
+ logout()
+
+ def test_failure_with_pending_builds(self):
+ pass
+
+ def test_failure_with_queue_items(self):
+ pass
+
+ def test_initialise(self):
+ pass
+
=== modified file 'scripts/ftpmaster-tools/initialise-from-parent.py'
--- scripts/ftpmaster-tools/initialise-from-parent.py 2010-04-27 19:48:39 +0000
+++ scripts/ftpmaster-tools/initialise-from-parent.py 2010-08-02 08:56:03 +0000
@@ -100,79 +100,6 @@
return 0
-def check_builds(distroseries):
- """Assert there are no pending builds for parent series.
-
- Only cares about the RELEASE pocket, which is the only one inherited
- via initialiseFromParent method.
- """
- # Avoid circular import.
- from lp.buildmaster.interfaces.buildbase import BuildStatus
- from lp.registry.interfaces.pocket import PackagePublishingPocket
-
- parentseries = distroseries.parent_series
-
- # only the RELEASE pocket is inherited, so we only check
- # pending build records for it.
- pending_builds = parentseries.getBuildRecords(
- BuildStatus.NEEDSBUILD, pocket=PackagePublishingPocket.RELEASE)
-
- assert pending_builds.count() == 0, (
- 'Parent must not have PENDING builds')
-
-def check_queue(distroseries):
- """Assert upload queue is empty on parent series.
-
- Only cares about the RELEASE pocket, which is the only one inherited
- via initialiseFromParent method.
- """
- # Avoid circular import.
- from lp.registry.interfaces.pocket import PackagePublishingPocket
-
- parentseries = distroseries.parent_series
-
- # only the RELEASE pocket is inherited, so we only check
- # queue items for it.
- new_items = parentseries.getQueueItems(
- PackageUploadStatus.NEW,
- pocket=PackagePublishingPocket.RELEASE)
- accepted_items = parentseries.getQueueItems(
- PackageUploadStatus.ACCEPTED,
- pocket=PackagePublishingPocket.RELEASE)
- unapproved_items = parentseries.getQueueItems(
- PackageUploadStatus.UNAPPROVED,
- pocket=PackagePublishingPocket.RELEASE)
-
- assert new_items.count() == 0, (
- 'Parent NEW queue must be empty')
- assert accepted_items.count() == 0, (
- 'Parent ACCEPTED queue must be empty')
- assert unapproved_items.count() == 0, (
- 'Parent UNAPPROVED queue must be empty')
-
-def copy_architectures(distroseries):
- """Overlap SQLObject and copy architecture from the parent.
-
- Also set the nominatedarchindep properly in target.
- """
- assert distroseries.architectures.count() is 0, (
- "Can not copy distroarchseries from parent, there are already "
- "distroarchseries(s) initialised for this series.")
- flush_database_updates()
- cur = cursor()
- cur.execute("""
- INSERT INTO DistroArchSeries
- (distroseries, processorfamily, architecturetag, owner, official)
- SELECT %s, processorfamily, architecturetag, %s, official
- FROM DistroArchSeries WHERE distroseries = %s
- """ % sqlvalues(distroseries, distroseries.owner,
- distroseries.parent_series))
- flush_database_caches()
-
- distroseries.nominatedarchindep = distroseries[
- distroseries.parent_series.nominatedarchindep.architecturetag]
-
-
if __name__ == '__main__':
sys.exit(main())