[Merge] ~cjwatson/launchpad:artifactory-publish into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:artifactory-publish into launchpad:master with ~cjwatson/launchpad:fix-artifactory-pool as a prerequisite.
Commit message:
Implement Artifactory publishing method
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/419733
The arrangements for identifying the target Artifactory repository are somewhat basic (they currently use `Archive.name`, which isn't guaranteed to be unique across a distribution), and the publisher needs to fetch all currently-published artifacts in order to work out which properties to update. Aside from those limitations, though, this is a working basic implementation: I've been able to use it to import a subset of Ubuntu focal and republish it to a test repository in Artifactory.
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:artifactory-publish into launchpad:master.
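
For orientation before the diff: the Artifactory target is derived from the site-wide base URL plus the archive name, roughly as in the sketch below. The standalone function and the "focal-test" archive name are illustrative only; the real code is the new branch of getPubConfig in the config.py hunk that follows.

    # Sketch of how the new getPubConfig branch picks the Artifactory target;
    # illustrative only, the real code is in the config.py hunk below.
    def artifactory_archive_root(base_url, archive_name):
        if base_url is None:
            # Publishing to Artifactory requires config.artifactory.base_url.
            raise AssertionError(
                "Cannot publish to Artifactory because "
                "config.artifactory.base_url is unset.")
        # Archive.name isn't guaranteed to be unique across a distribution,
        # so for now this relies on Artifactory archives being admin-created.
        return "%s/%s" % (base_url.rstrip("/"), archive_name)

    artifactory_archive_root("https://foo.example.com/artifactory", "focal-test")
    # -> "https://foo.example.com/artifactory/focal-test"
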
diff --git a/lib/lp/archivepublisher/config.py b/lib/lp/archivepublisher/config.py
index 92c3917..c5f9b44 100644
--- a/lib/lp/archivepublisher/config.py
+++ b/lib/lp/archivepublisher/config.py
@@ -11,12 +11,14 @@ import os
from zope.component import getUtility
+from lp.archivepublisher.artifactory import ArtifactoryPool
from lp.archivepublisher.diskpool import DiskPool
from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
from lp.registry.interfaces.distribution import IDistributionSet
from lp.services.config import config
from lp.soyuz.enums import (
archive_suffixes,
+ ArchivePublishingMethod,
ArchivePurpose,
)
@@ -31,7 +33,7 @@ def getPubConfig(archive):
modified according local context, it basically fixes the archive
paths to cope with non-primary and PPA archives publication workflow.
"""
- pubconf = Config()
+ pubconf = Config(archive)
ppa_config = config.personalpackagearchive
db_pubconf = getUtility(
IPublisherConfigSet).getByDistribution(archive.distribution)
@@ -41,7 +43,19 @@ def getPubConfig(archive):
pubconf.temproot = os.path.join(
db_pubconf.root_dir, '%s-temp' % archive.distribution.name)
- if archive.is_ppa:
+ if archive.publishing_method == ArchivePublishingMethod.ARTIFACTORY:
+ if config.artifactory.base_url is None:
+ raise AssertionError(
+ "Cannot publish to Artifactory because "
+ "config.artifactory.base_url is unset.")
+ pubconf.distroroot = None
+ # XXX cjwatson 2022-04-01: This assumes that only admins can
+ # configure archives to publish to Artifactory, since Archive.name
+ # isn't unique. We may eventually need to use a new column with a
+ # unique constraint, but this is enough to get us going for now.
+ pubconf.archiveroot = "%s/%s" % (
+ config.artifactory.base_url.rstrip("/"), archive.name)
+ elif archive.is_ppa:
if archive.private:
pubconf.distroroot = ppa_config.private_root
else:
@@ -70,7 +84,8 @@ def getPubConfig(archive):
if archive.is_copy:
pubconf.temproot = pubconf.archiveroot + '-temp'
- if archive.purpose in APT_FTPARCHIVE_PURPOSES:
+ if (archive.publishing_method == ArchivePublishingMethod.LOCAL and
+ archive.purpose in APT_FTPARCHIVE_PURPOSES):
pubconf.overrideroot = pubconf.archiveroot + '-overrides'
pubconf.cacheroot = pubconf.archiveroot + '-cache'
pubconf.miscroot = pubconf.archiveroot + '-misc'
@@ -107,7 +122,8 @@ def getPubConfig(archive):
# by a few PPAs, and only by USC, so we leave metaroot unset and
# ignore the uploads for anything except Ubuntu PPAs.
ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
- if archive.is_ppa and archive.distribution == ubuntu:
+ if (archive.publishing_method == ArchivePublishingMethod.LOCAL and
+ archive.is_ppa and archive.distribution == ubuntu):
meta_root = os.path.join(
pubconf.distroroot, archive.owner.name)
pubconf.metaroot = os.path.join(
@@ -133,8 +149,24 @@ class Config:
how the database stores configuration then the publisher will not
need to be re-coded to cope"""
+ disk_pool_factory = {
+ ArchivePublishingMethod.LOCAL: DiskPool,
+ ArchivePublishingMethod.ARTIFACTORY: (
+ # Artifactory publishing doesn't need a temporary directory on
+ # the same filesystem as the pool, since the pool is remote
+ # anyway.
+ lambda rootpath, temppath, logger: ArtifactoryPool(
+ rootpath, logger)),
+ }
+
+ def __init__(self, archive):
+ self.archive = archive
+
def setupArchiveDirs(self):
"""Create missing required directories in archive."""
+ if self.archive.publishing_method != ArchivePublishingMethod.LOCAL:
+ return
+
required_directories = [
self.distroroot,
self.poolroot,
@@ -168,7 +200,8 @@ class Config:
pool_root = pool_root_override
if pool_root is None:
pool_root = self.poolroot
- dp = DiskPool(pool_root, self.temproot, dp_log)
+ dp_factory = self.disk_pool_factory[self.archive.publishing_method]
+ dp = dp_factory(pool_root, self.temproot, dp_log)
# Set the diskpool's log level to INFO to suppress debug output.
dp_log.setLevel(logging.INFO)
return dp
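
Taken together, the config.py changes above amount to a per-archive choice of pool implementation. A condensed, illustrative view of what Config.getDiskPool now returns (the standalone function is not part of the branch; the classes, constructor signatures, and enum are the ones used in the hunks above):

    from lp.archivepublisher.artifactory import ArtifactoryPool
    from lp.archivepublisher.diskpool import DiskPool
    from lp.soyuz.enums import ArchivePublishingMethod

    # Condensed view of the disk_pool_factory dispatch; illustrative only.
    def make_pool(archive, pool_root, temp_root, logger):
        if archive.publishing_method == ArchivePublishingMethod.LOCAL:
            # The on-disk pool needs a temporary directory on the same
            # filesystem as the pool itself.
            return DiskPool(pool_root, temp_root, logger)
        elif archive.publishing_method == ArchivePublishingMethod.ARTIFACTORY:
            # The Artifactory pool is remote, so no local temp directory is
            # needed.
            return ArtifactoryPool(pool_root, logger)
        else:
            raise AssertionError(
                "Unhandled publishing method: %r" % archive.publishing_method)
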
diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
index a063e07..0639716 100644
--- a/lib/lp/archivepublisher/publishing.py
+++ b/lib/lp/archivepublisher/publishing.py
@@ -75,6 +75,7 @@ from lp.services.osutils import (
)
from lp.services.utils import file_exists
from lp.soyuz.enums import (
+ ArchivePublishingMethod,
ArchivePurpose,
ArchiveStatus,
BinaryPackageFormat,
@@ -719,6 +720,42 @@ class Publisher:
self._writeComponentIndexes(
distroseries, pocket, component)
+ def C_updateArtifactoryProperties(self, is_careful):
+ """Update Artifactory properties to match our database."""
+ self.log.debug("* Step C'': Updating properties in Artifactory")
+ # We don't currently have a more efficient approach available than
+ # just syncing up the entire repository. At the moment it's
+ # difficult to do better, since Launchpad's publishing model
+ # normally assumes that the disk pool only needs to know about
+ # removals once they get to the point of being removed entirely from
+ # disk, as opposed to just being removed from a single
+ # suite/channel. A more complete overhaul of the
+ # publisher/dominator might be able to deal with this.
+ publishing_set = getUtility(IPublishingSet)
+ pubs_by_id = defaultdict(list)
+ spphs_by_spr = defaultdict(list)
+ bpphs_by_bpr = defaultdict(list)
+ for spph in publishing_set.getSourcesForPublishing(
+ archive=self.archive):
+ spphs_by_spr[spph.sourcepackagereleaseID].append(spph)
+ pubs_by_id["source:%d" % spph.sourcepackagereleaseID].append(spph)
+ for bpph in publishing_set.getBinariesForPublishing(
+ archive=self.archive):
+ bpphs_by_bpr[bpph.binarypackagereleaseID].append(bpph)
+ pubs_by_id["binary:%d" % bpph.binarypackagereleaseID].append(bpph)
+ artifacts = self._diskpool.getAllArtifacts(
+ self.archive.name, self.archive.repository_format)
+
+ for path, properties in sorted(artifacts.items()):
+ release_id = properties.get("launchpad.release-id")
+ source_name = properties.get("launchpad.source-name")
+ if not release_id or not source_name:
+ # Skip any files that Launchpad didn't put in Artifactory.
+ continue
+ self._diskpool.updateProperties(
+ source_name[0], path.name, pubs_by_id.get(release_id[0]),
+ old_properties=properties)
+
def D_writeReleaseFiles(self, is_careful):
"""Write out the Release files for the provided distribution.
@@ -1435,6 +1472,10 @@ class Publisher:
be caught and an OOPS report generated.
"""
assert self.archive.is_ppa
+ if self.archive.publishing_method != ArchivePublishingMethod.LOCAL:
+ raise NotImplementedError(
+ "Don't know how to delete archives published using %s" %
+ self.archive.publishing_method.title)
self.log.info(
"Attempting to delete archive '%s/%s' at '%s'." % (
self.archive.owner.name, self.archive.name,
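
The net effect of the new step C'' above is to keep a small set of properties on every artifact in sync with the database. For a published binary the maintained properties look roughly like this; the shape is taken from the test expectations further down, and the release id number is made up for illustration:

    # Shape of the properties maintained on a published .deb.  The "deb.*"
    # keys are what Artifactory uses to build its Debian indexes, while the
    # "launchpad.*" keys let step C'' map the artifact back to its
    # publications.  The id 123 is illustrative.
    example_binary_properties = {
        "deb.architecture": ["i386"],
        "deb.component": ["main"],
        "deb.distribution": ["breezy-autotest"],
        "launchpad.release-id": ["binary:123"],
        "launchpad.source-name": ["hello"],
    }
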
diff --git a/lib/lp/archivepublisher/scripts/publishdistro.py b/lib/lp/archivepublisher/scripts/publishdistro.py
index b82f992..7f8ed62 100644
--- a/lib/lp/archivepublisher/scripts/publishdistro.py
+++ b/lib/lp/archivepublisher/scripts/publishdistro.py
@@ -27,6 +27,7 @@ from lp.services.webapp.adapter import (
set_request_started,
)
from lp.soyuz.enums import (
+ ArchivePublishingMethod,
ArchivePurpose,
ArchiveStatus,
)
@@ -309,6 +310,8 @@ class PublishDistro(PublisherScript):
Commits transactions along the way.
"""
+ publishing_method = archive.publishing_method
+
for distroseries, pocket in self.findExplicitlyDirtySuites(archive):
if not cannot_modify_suite(archive, distroseries, pocket):
publisher.markSuiteDirty(distroseries, pocket)
@@ -333,23 +336,31 @@ class PublishDistro(PublisherScript):
self.txn.commit()
if self.options.enable_apt:
- # The primary and copy archives use apt-ftparchive to
- # generate the indexes, everything else uses the newer
- # internal LP code.
careful_indexing = self.isCareful(self.options.careful_apt)
- if archive.purpose in (
- ArchivePurpose.PRIMARY, ArchivePurpose.COPY):
- publisher.C_doFTPArchive(careful_indexing)
+ if publishing_method == ArchivePublishingMethod.LOCAL:
+ # The primary and copy archives use apt-ftparchive to
+ # generate the indexes, everything else uses the newer
+ # internal LP code.
+ if archive.purpose in (
+ ArchivePurpose.PRIMARY, ArchivePurpose.COPY):
+ publisher.C_doFTPArchive(careful_indexing)
+ else:
+ publisher.C_writeIndexes(careful_indexing)
+ elif publishing_method == ArchivePublishingMethod.ARTIFACTORY:
+ publisher.C_updateArtifactoryProperties(careful_indexing)
else:
- publisher.C_writeIndexes(careful_indexing)
+ raise AssertionError(
+ "Unhandled publishing method: %r" % publishing_method)
self.txn.commit()
- if self.options.enable_release:
+ if (self.options.enable_release and
+ publishing_method == ArchivePublishingMethod.LOCAL):
publisher.D_writeReleaseFiles(self.isCareful(
self.options.careful_apt or self.options.careful_release))
# The caller will commit this last step.
- if self.options.enable_apt:
+ if (self.options.enable_apt and
+ publishing_method == ArchivePublishingMethod.LOCAL):
publisher.createSeriesAliases()
def processArchive(self, archive_id, reset_store=True):
diff --git a/lib/lp/archivepublisher/tests/test_publishdistro.py b/lib/lp/archivepublisher/tests/test_publishdistro.py
index 399d907..fa137f4 100644
--- a/lib/lp/archivepublisher/tests/test_publishdistro.py
+++ b/lib/lp/archivepublisher/tests/test_publishdistro.py
@@ -41,6 +41,7 @@ from lp.services.log.logger import (
)
from lp.services.scripts.base import LaunchpadScriptFailure
from lp.soyuz.enums import (
+ ArchivePublishingMethod,
ArchivePurpose,
ArchiveStatus,
PackagePublishingStatus,
@@ -512,6 +513,7 @@ class FakeArchive:
self.purpose = purpose
self.status = ArchiveStatus.ACTIVE
self.dirty_suites = []
+ self.publishing_method = ArchivePublishingMethod.LOCAL
class FakePublisher:
diff --git a/lib/lp/archivepublisher/tests/test_publisher.py b/lib/lp/archivepublisher/tests/test_publisher.py
index 754138b..cfb48d5 100644
--- a/lib/lp/archivepublisher/tests/test_publisher.py
+++ b/lib/lp/archivepublisher/tests/test_publisher.py
@@ -71,6 +71,9 @@ from lp.archivepublisher.publishing import (
I18nIndex,
Publisher,
)
+from lp.archivepublisher.tests.artifactory_fixture import (
+ FakeArtifactoryFixture,
+ )
from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
from lp.archivepublisher.utils import RepositoryIndexFile
from lp.registry.interfaces.distribution import IDistributionSet
@@ -94,7 +97,9 @@ from lp.services.log.logger import (
from lp.services.osutils import open_for_writing
from lp.services.utils import file_exists
from lp.soyuz.enums import (
+ ArchivePublishingMethod,
ArchivePurpose,
+ ArchiveRepositoryFormat,
ArchiveStatus,
BinaryPackageFormat,
IndexCompressionType,
@@ -3545,4 +3550,186 @@ class TestDirectoryHash(TestDirectoryHashHelpers):
self.assertThat(self.fetchSums(rootdir), MatchesDict(expected))
+class TestArtifactoryPublishing(TestPublisherBase):
+ """Test publishing to Artifactory."""
+
+ def setUpArtifactory(self, repository_format):
+ self.base_url = "https://foo.example.com/artifactory"
+ self.pushConfig("artifactory", base_url=self.base_url)
+ self.archive = self.factory.makeArchive(
+ distribution=self.ubuntutest,
+ publishing_method=ArchivePublishingMethod.ARTIFACTORY,
+ repository_format=repository_format)
+ self.config = getPubConfig(self.archive)
+ self.disk_pool = self.config.getDiskPool(self.logger)
+ self.disk_pool.logger = self.logger
+ self.artifactory = self.useFixture(
+ FakeArtifactoryFixture(self.base_url, self.archive.name))
+
+ def test_publish_files(self):
+ """The actual publishing of packages' files to Artifactory works."""
+ self.setUpArtifactory(ArchiveRepositoryFormat.DEBIAN)
+ publisher = Publisher(
+ self.logger, self.config, self.disk_pool, self.archive)
+ pub_source = self.getPubSource(
+ sourcename="foo", version="666", filecontent=b"Hello world",
+ archive=self.archive)
+
+ publisher.A_publish(False)
+
+ self.assertEqual({"breezy-autotest"}, publisher.dirty_suites)
+ self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_source.status)
+ path = self.disk_pool.rootpath / "f" / "foo" / "foo_666.dsc"
+ with path.open() as f:
+ self.assertEqual(b"Hello world", f.read())
+
+ def test_initialize_properties(self):
+ """We set initial properties for newly-published files."""
+ self.setUpArtifactory(ArchiveRepositoryFormat.DEBIAN)
+ publisher = Publisher(
+ self.logger, self.config, self.disk_pool, self.archive)
+ source = self.getPubSource(
+ sourcename="hello", version="1.0", archive=self.archive,
+ architecturehintlist="i386")
+ binary = self.getPubBinaries(
+ binaryname="hello", version="1.0", archive=self.archive,
+ pub_source=source)[0]
+
+ publisher.A_publish(False)
+ publisher.C_updateArtifactoryProperties(False)
+
+ source_path = self.disk_pool.rootpath / "h" / "hello" / "hello_1.0.dsc"
+ self.assertEqual(
+ {
+ "deb.component": ["main"],
+ "deb.distribution": ["breezy-autotest"],
+ "launchpad.release-id":
+ ["source:%d" % source.sourcepackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ source_path.properties)
+ binary_path = (
+ self.disk_pool.rootpath / "h" / "hello" /
+ ("hello_1.0_%s.deb" % binary.distroarchseries.architecturetag))
+ self.assertEqual(
+ {
+ "deb.architecture": [binary.distroarchseries.architecturetag],
+ "deb.component": ["main"],
+ "deb.distribution": ["breezy-autotest"],
+ "launchpad.release-id":
+ ["binary:%d" % binary.binarypackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ binary_path.properties)
+
+ def test_update_properties(self):
+ """We update properties when publishing contexts are updated."""
+ self.setUpArtifactory(ArchiveRepositoryFormat.DEBIAN)
+ publisher = Publisher(
+ self.logger, self.config, self.disk_pool, self.archive)
+ source = self.getPubSource(
+ sourcename="hello", version="1.0", archive=self.archive,
+ architecturehintlist="i386")
+ binary = self.getPubBinaries(
+ binaryname="hello", version="1.0", archive=self.archive,
+ pub_source=source)[0]
+
+ # Do an initial publication so that we have something to update.
+ publisher.A_publish(False)
+ publisher.C_updateArtifactoryProperties(False)
+
+ source_path = self.disk_pool.rootpath / "h" / "hello" / "hello_1.0.dsc"
+ self.assertEqual(
+ ["breezy-autotest"], source_path.properties["deb.distribution"])
+ binary_path = (
+ self.disk_pool.rootpath / "h" / "hello" /
+ ("hello_1.0_%s.deb" % binary.distroarchseries.architecturetag))
+ self.assertEqual(
+ ["breezy-autotest"], binary_path.properties["deb.distribution"])
+
+ # Copy the source and binary to another publishing context, and
+ # ensure that we update their Artifactory properties accordingly.
+ source.copyTo(
+ archive=self.archive,
+ distroseries=self.archive.distribution["hoary-test"],
+ pocket=PackagePublishingPocket.RELEASE)
+ binary.copyTo(
+ archive=self.archive,
+ distroseries=self.archive.distribution["hoary-test"],
+ pocket=PackagePublishingPocket.RELEASE)
+
+ publisher.A_publish(False)
+ publisher.C_updateArtifactoryProperties(False)
+
+ self.assertEqual(
+ {
+ "deb.component": ["main"],
+ "deb.distribution": ["breezy-autotest", "hoary-test"],
+ "launchpad.release-id":
+ ["source:%d" % source.sourcepackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ source_path.properties)
+ self.assertEqual(
+ {
+ "deb.architecture": [binary.distroarchseries.architecturetag],
+ "deb.component": ["main"],
+ "deb.distribution": ["breezy-autotest", "hoary-test"],
+ "launchpad.release-id":
+ ["binary:%d" % binary.binarypackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ binary_path.properties)
+
+ def test_remove_properties(self):
+ """We remove properties if a file is no longer published anywhere."""
+ self.setUpArtifactory(ArchiveRepositoryFormat.DEBIAN)
+ publisher = Publisher(
+ self.logger, self.config, self.disk_pool, self.archive)
+ source = self.getPubSource(
+ sourcename="hello", version="1.0", archive=self.archive,
+ architecturehintlist="i386")
+ binary = self.getPubBinaries(
+ binaryname="hello", version="1.0", archive=self.archive,
+ pub_source=source)[0]
+
+ # Do an initial publication so that we have something to update.
+ publisher.A_publish(False)
+ publisher.C_updateArtifactoryProperties(False)
+
+ source_path = self.disk_pool.rootpath / "h" / "hello" / "hello_1.0.dsc"
+ self.assertEqual(
+ ["breezy-autotest"], source_path.properties["deb.distribution"])
+ binary_path = (
+ self.disk_pool.rootpath / "h" / "hello" /
+ ("hello_1.0_%s.deb" % binary.distroarchseries.architecturetag))
+ self.assertEqual(
+ ["breezy-autotest"], binary_path.properties["deb.distribution"])
+
+ source.requestDeletion(self.archive.owner)
+ binary.requestDeletion(self.archive.owner)
+
+ publisher.A_publish(False)
+ publisher.A2_markPocketsWithDeletionsDirty()
+ publisher.C_updateArtifactoryProperties(False)
+
+ # The artifacts are still present until process-death-row runs, but
+ # they no longer have any properties that would cause them to be
+ # included in indexes.
+ self.assertEqual(
+ {
+ "launchpad.release-id":
+ ["source:%d" % source.sourcepackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ source_path.properties)
+ self.assertEqual(
+ {
+ "launchpad.release-id":
+ ["binary:%d" % binary.binarypackagereleaseID],
+ "launchpad.source-name": ["hello"],
+ },
+ binary_path.properties)
+
+
load_tests = load_tests_apply_scenarios
diff --git a/lib/lp/soyuz/interfaces/publishing.py b/lib/lp/soyuz/interfaces/publishing.py
index f98cdb0..a1cfeab 100644
--- a/lib/lp/soyuz/interfaces/publishing.py
+++ b/lib/lp/soyuz/interfaces/publishing.py
@@ -1154,26 +1154,28 @@ class IPublishingSet(Interface):
release in the given `archive`, `distroseries`, and `pocket`.
"""
- def getSourcesForPublishing(archive, distroseries, pocket, component):
+ def getSourcesForPublishing(archive, distroseries=None, pocket=None,
+ component=None):
"""Get source publications which are published in a given context.
:param archive: The `Archive` to search.
- :param distroseries: The `DistroSeries` to search.
- :param pocket: The `PackagePublishingPocket` to search.
- :param component: The `Component` to search.
+ :param distroseries: The `DistroSeries` to search, or None.
+ :param pocket: The `PackagePublishingPocket` to search, or None.
+ :param component: The `Component` to search, or None.
:return: A result set of `SourcePackagePublishingHistory` objects in
the given context and with the `PUBLISHED` status, ordered by
source package name, with associated publisher-relevant objects
preloaded.
"""
- def getBinariesForPublishing(archive, distroarchseries, pocket, component):
+ def getBinariesForPublishing(archive, distroarchseries=None, pocket=None,
+ component=None):
"""Get binary publications which are published in a given context.
:param archive: The `Archive` to search.
- :param distroarchseries: The `DistroArchSeries` to search.
- :param pocket: The `PackagePublishingPocket` to search.
- :param component: The `Component` to search.
+ :param distroarchseries: The `DistroArchSeries` to search, or None.
+ :param pocket: The `PackagePublishingPocket` to search, or None.
+ :param component: The `Component` to search, or None.
:return: A result set of `BinaryPackagePublishingHistory` objects in
the given context and with the `PUBLISHED` status, ordered by
binary package name, with associated publisher-relevant objects
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 7341b6c..cdab414 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -177,7 +177,8 @@ class ArchivePublisherBase:
try:
for pub_file in self.files:
source = self.source_package_name
- component = self.component.name
+ component = (
+ None if self.component is None else self.component.name)
filename = pub_file.libraryfile.filename
path = diskpool.pathFor(component, source, filename)
@@ -1548,19 +1549,27 @@ class PublishingSet:
active_publishing_status),
BinaryPackageRelease.architecturespecific == True)
- def getSourcesForPublishing(self, archive, distroseries, pocket,
- component):
+ def getSourcesForPublishing(self, archive, distroseries=None, pocket=None,
+ component=None):
"""See `IPublishingSet`."""
- spphs = IStore(SourcePackagePublishingHistory).find(
- SourcePackagePublishingHistory,
+ clauses = [
SourcePackagePublishingHistory.archive == archive,
- SourcePackagePublishingHistory.distroseries == distroseries,
- SourcePackagePublishingHistory.pocket == pocket,
- SourcePackagePublishingHistory.component == component,
SourcePackagePublishingHistory.status ==
PackagePublishingStatus.PUBLISHED,
SourcePackagePublishingHistory.sourcepackagename ==
- SourcePackageName.id).order_by(SourcePackageName.name)
+ SourcePackageName.id,
+ ]
+ if distroseries is not None:
+ clauses.append(
+ SourcePackagePublishingHistory.distroseries == distroseries)
+ if pocket is not None:
+ clauses.append(SourcePackagePublishingHistory.pocket == pocket)
+ if component is not None:
+ clauses.append(
+ SourcePackagePublishingHistory.component == component)
+ spphs = IStore(SourcePackagePublishingHistory).find(
+ SourcePackagePublishingHistory,
+ *clauses).order_by(SourcePackageName.name)
def eager_load(spphs):
# Preload everything which will be used by archivepublisher's
@@ -1585,20 +1594,28 @@ class PublishingSet:
return DecoratedResultSet(spphs, pre_iter_hook=eager_load)
- def getBinariesForPublishing(self, archive, distroarchseries, pocket,
- component):
+ def getBinariesForPublishing(self, archive, distroarchseries=None,
+ pocket=None, component=None):
"""See `IPublishingSet`."""
- bpphs = IStore(BinaryPackagePublishingHistory).find(
- BinaryPackagePublishingHistory,
+ clauses = [
BinaryPackagePublishingHistory.archive == archive,
- BinaryPackagePublishingHistory.distroarchseries ==
- distroarchseries,
- BinaryPackagePublishingHistory.pocket == pocket,
- BinaryPackagePublishingHistory.component == component,
BinaryPackagePublishingHistory.status ==
PackagePublishingStatus.PUBLISHED,
BinaryPackagePublishingHistory.binarypackagename ==
- BinaryPackageName.id).order_by(BinaryPackageName.name)
+ BinaryPackageName.id,
+ ]
+ if distroarchseries is not None:
+ clauses.append(
+ BinaryPackagePublishingHistory.distroarchseries ==
+ distroarchseries)
+ if pocket is not None:
+ clauses.append(BinaryPackagePublishingHistory.pocket == pocket)
+ if component is not None:
+ clauses.append(
+ BinaryPackagePublishingHistory.component == component)
+ bpphs = IStore(BinaryPackagePublishingHistory).find(
+ BinaryPackagePublishingHistory,
+ *clauses).order_by(BinaryPackageName.name)
def eager_load(bpphs):
# Preload everything which will be used by archivepublisher's
diff --git a/lib/lp/soyuz/tests/test_publishing.py b/lib/lp/soyuz/tests/test_publishing.py
index c153c8e..41cdf25 100644
--- a/lib/lp/soyuz/tests/test_publishing.py
+++ b/lib/lp/soyuz/tests/test_publishing.py
@@ -625,7 +625,7 @@ class TestNativePublishingBase(TestCaseWithFactory, SoyuzTestPublisher):
self.config.distroroot,
config.personalpackagearchive.root,
config.personalpackagearchive.private_root):
- if os.path.exists(root):
+ if root is not None and os.path.exists(root):
shutil.rmtree(root)
def getPubSource(self, *args, **kwargs):
@@ -1095,6 +1095,10 @@ class TestPublishingSetLite(TestCaseWithFactory):
archive=debian_archive, distroseries=hoary,
pocket=PackagePublishingPocket.RELEASE,
component=component_main).count())
+ self.assertEqual(
+ 14,
+ publishing_set.getSourcesForPublishing(
+ archive=hoary.main_archive).count())
def test_getSourcesForPublishing_query_count(self):
# Check that the number of queries required to publish source
@@ -1183,6 +1187,10 @@ class TestPublishingSetLite(TestCaseWithFactory):
archive=debian_archive, distroarchseries=warty_i386,
pocket=PackagePublishingPocket.RELEASE,
component=component_main).count())
+ self.assertEqual(
+ 12,
+ publishing_set.getBinariesForPublishing(
+ archive=warty.main_archive).count())
def test_getBinariesForPublishing_query_count(self):
# Check that the number of queries required to publish binary