launchpad-reviewers team mailing list archive
Message #28591
[Merge] ~cjwatson/launchpad:conda-archive-layout into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:conda-archive-layout into launchpad:master.
Commit message:
Handle publishing to the Conda archive layout
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/424574
This required some refactoring, as the directory where a Conda package file should be published was difficult to determine using only information that the pool implementation already had: it would have had to derive the Conda architecture name from the Debian architecture name, which would have been fragile and difficult to maintain.
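To make the rejected alternative concrete: it would have meant hand-maintaining a mapping from Debian architecture tags to Conda `subdir` names, roughly like the sketch below (the names `_DEBIAN_TO_CONDA_SUBDIR` and `conda_subdir_for` are hypothetical, and the table is illustrative rather than complete):

```python
# Hypothetical sketch of the approach that was avoided: deriving the Conda
# "subdir" from the Debian architecture tag via a hand-maintained table.
# Every new architecture (or Conda naming change) would need an edit here.
_DEBIAN_TO_CONDA_SUBDIR = {
    "amd64": "linux-64",
    "arm64": "linux-aarch64",
    "ppc64el": "linux-ppc64le",
    # ... and so on for every architecture Launchpad might build for.
}


def conda_subdir_for(architecture_tag: str) -> str:
    try:
        return _DEBIAN_TO_CONDA_SUBDIR[architecture_tag]
    except KeyError:
        raise AssertionError(
            "No known Conda subdir for architecture %r" % architecture_tag)
```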
Instead, parse the `subdir` entry from the Conda index, and pass that down via `BinaryPackageRelease.user_defined_fields`. This requires ensuring that the pool has access to the `BinaryPackageRelease` (via `IPackageReleaseFile`). In one case that's difficult: `DiskPool.pathFor` is called by `FTPArchiveHandler.publishFileLists`, and getting hold of the whole release file row in that case would require significantly widening a query with an already large result set. Fortunately, it's straightforward in every other non-test case, so I was able to arrange for `DiskPool.pathFor` to accept either an `IPackageReleaseFile` or a file name with only a small amount of ugliness, and everything else derives the file name from the `IPackageReleaseFile` where needed.
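In condensed form, the `subdir` flows through like this (a sketch only; `conda_publish_dir` is a hypothetical stand-in for the logic that lives in `_path_for` in the diff below):

```python
# At upload time, CIBuildUploadJob records the value parsed from the Conda
# index in BinaryPackageRelease.user_defined_fields:
#     "user_defined_fields": [("subdir", index["subdir"])]
# At publication time, the pool looks it back up to build the target path.
def conda_publish_dir(rootpath, pub_file):
    fields = pub_file.binarypackagerelease.user_defined_fields
    subdir = next(
        (value for key, value in fields if key == "subdir"), None)
    if subdir is None:
        raise AssertionError("Cannot publish a Conda package with no subdir")
    return rootpath / subdir / pub_file.libraryfile.filename
```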
`Publisher.C_updateArtifactoryProperties` also needed a little work. It now has to pass the appropriate `IPackageReleaseFile`, but the existing code could only find that file if it was still published somewhere, not if all of its publications had been deleted while the file was still in the pool pending removal. Use an alternative approach to get at the release files in such cases.
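That alternative approach boils down to parsing the release IDs recorded in the artifact properties and bulk-loading any release rows that no longer have publishing history, roughly as follows (a condensed sketch of the change below; `release_ids_without_publications` and `releases_by_id` stand in for the local state built up in `C_updateArtifactoryProperties`):

```python
# Condensed sketch: recover release rows for pool artifacts whose
# publications have all been deleted, so their properties can still be
# updated before the files are removed.
missing_sources, missing_binaries = set(), set()
for release_id in release_ids_without_publications:
    match = re.match(r"^source:(\d+)$", release_id)
    if match is not None:
        missing_sources.add(int(match.group(1)))
        continue
    match = re.match(r"^binary:(\d+)$", release_id)
    if match is not None:
        missing_binaries.add(int(match.group(1)))
for spr in load(SourcePackageRelease, missing_sources):
    releases_by_id["source:%d" % spr.id] = spr
for bpr in load(BinaryPackageRelease, missing_binaries):
    releases_by_id["binary:%d" % bpr.id] = bpr
```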
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:conda-archive-layout into launchpad:master.
diff --git a/lib/lp/archivepublisher/artifactory.py b/lib/lp/archivepublisher/artifactory.py
index 6d43b6c..740519b 100644
--- a/lib/lp/archivepublisher/artifactory.py
+++ b/lib/lp/archivepublisher/artifactory.py
@@ -42,30 +42,40 @@ from lp.soyuz.interfaces.publishing import (
def _path_for(archive: IArchive, rootpath: ArtifactoryPath, source_name: str,
- source_version: str, filename: Optional[str] = None) -> Path:
+ source_version: str, pub_file: IPackageReleaseFile) -> Path:
repository_format = archive.repository_format
if repository_format == ArchiveRepositoryFormat.DEBIAN:
path = rootpath / poolify(source_name)
elif repository_format == ArchiveRepositoryFormat.PYTHON:
path = rootpath / source_name / source_version
+ elif repository_format == ArchiveRepositoryFormat.CONDA:
+ user_defined_fields = (
+ pub_file.binarypackagerelease.user_defined_fields)
+ subdir = next(
+ (value for key, value in user_defined_fields if key == "subdir"),
+ None)
+ if subdir is None:
+ raise AssertionError(
+ "Cannot publish a Conda package with no subdir")
+ path = rootpath / subdir
else:
raise AssertionError(
"Unsupported repository format: %r" % repository_format)
- if filename:
- path = path / filename
+ path = path / pub_file.libraryfile.filename
return path
class ArtifactoryPoolEntry:
def __init__(self, archive: IArchive, rootpath: ArtifactoryPath,
- source_name: str, source_version: str, filename: str,
+ source_name: str, source_version: str,
+ pub_file: IPackageReleaseFile,
logger: logging.Logger) -> None:
self.archive = archive
self.rootpath = rootpath
self.source_name = source_name
self.source_version = source_version
- self.filename = filename
+ self.pub_file = pub_file
self.logger = logger
def debug(self, *args, **kwargs) -> None:
@@ -80,7 +90,7 @@ class ArtifactoryPoolEntry:
# in order to update an artifact's properties.
return _path_for(
self.archive, self.rootpath, self.source_name, self.source_version,
- self.filename)
+ self.pub_file)
def makeReleaseID(self, pub_file: IPackageReleaseFile) -> str:
"""
@@ -177,11 +187,11 @@ class ArtifactoryPoolEntry:
for pub in publications})
return properties
- def addFile(self, pub_file: IPackageReleaseFile):
+ def addFile(self):
targetpath = self.pathFor()
if not targetpath.parent.exists():
targetpath.parent.mkdir()
- lfa = pub_file.libraryfile
+ lfa = self.pub_file.libraryfile
if targetpath.exists():
file_hash = targetpath.stat().sha1
@@ -193,7 +203,7 @@ class ArtifactoryPoolEntry:
self.debug("Deploying %s", targetpath)
properties = self.calculateProperties(
- self.makeReleaseID(pub_file), [])
+ self.makeReleaseID(self.pub_file), [])
fd, name = tempfile.mkstemp(prefix="temp-download.")
f = os.fdopen(fd, "wb")
try:
@@ -280,32 +290,25 @@ class ArtifactoryPool:
return session
def _getEntry(self, source_name: str, source_version: str,
- file: str) -> ArtifactoryPoolEntry:
+ pub_file: IPackageReleaseFile) -> ArtifactoryPoolEntry:
"""See `DiskPool._getEntry`."""
return ArtifactoryPoolEntry(
- self.archive, self.rootpath, source_name, source_version, file,
+ self.archive, self.rootpath, source_name, source_version, pub_file,
self.logger)
def pathFor(self, comp: str, source_name: str, source_version: str,
- file: Optional[str] = None) -> Path:
- """Return the path for the given pool folder or file.
-
- If file is none, the path to the folder containing all packages
- for the given source package name will be returned.
-
- If file is specified, the path to the specific package file will
- be returned.
- """
+ pub_file: Optional[IPackageReleaseFile] = None) -> Path:
+ """Return the path for the given pool file."""
# For Artifactory publication, we ignore the component. There's
# only marginal benefit in having it be explicitly represented in
# the pool structure, and doing so would introduce significant
# complications in terms of having to keep track of components just
# in order to update an artifact's properties.
return _path_for(
- self.archive, self.rootpath, source_name, source_version, file)
+ self.archive, self.rootpath, source_name, source_version, pub_file)
def addFile(self, component: str, source_name: str, source_version: str,
- filename: str, pub_file: IPackageReleaseFile):
+ pub_file: IPackageReleaseFile):
"""Add a file with the given contents to the pool.
`source_name`, `source_version`, and `filename` are used to
@@ -329,11 +332,11 @@ class ArtifactoryPool:
This is similar to `DiskPool.addFile`, except that there is no
symlink handling and the component is ignored.
"""
- entry = self._getEntry(source_name, source_version, filename)
- return entry.addFile(pub_file)
+ entry = self._getEntry(source_name, source_version, pub_file)
+ return entry.addFile()
def removeFile(self, component: str, source_name: str, source_version: str,
- filename: str) -> int:
+ pub_file: IPackageReleaseFile) -> int:
"""Remove the specified file from the pool.
There are two possible outcomes:
@@ -345,13 +348,14 @@ class ArtifactoryPool:
This is similar to `DiskPool.removeFile`, except that there is no
symlink handling and the component is ignored.
"""
- entry = self._getEntry(source_name, source_version, filename)
+ entry = self._getEntry(source_name, source_version, pub_file)
return entry.removeFile()
def updateProperties(self, source_name: str, source_version: str,
- filename: str, publications, old_properties=None):
+ pub_file: IPackageReleaseFile, publications,
+ old_properties=None):
"""Update a file's properties in Artifactory."""
- entry = self._getEntry(source_name, source_version, filename)
+ entry = self._getEntry(source_name, source_version, pub_file)
entry.updateProperties(publications, old_properties=old_properties)
def getArtifactPatterns(self, repository_format):
diff --git a/lib/lp/archivepublisher/deathrow.py b/lib/lp/archivepublisher/deathrow.py
index 5ad3c61..db73277 100644
--- a/lib/lp/archivepublisher/deathrow.py
+++ b/lib/lp/archivepublisher/deathrow.py
@@ -88,12 +88,13 @@ class DeathRow:
if dry_run:
# Don't actually remove the files if we are dry running
def _mockRemoveFile(component_name, pool_name, pool_version,
- file_name):
+ pub_file):
self.logger.debug(
"(Not really!) removing %s %s/%s/%s" %
- (component_name, pool_name, pool_version, file_name))
+ (component_name, pool_name, pool_version,
+ pub_file.libraryfile.filename))
fullpath = self.diskpool.pathFor(
- component_name, pool_name, pool_version, file_name)
+ component_name, pool_name, pool_version, pub_file)
if not fullpath.exists():
raise NotInPool
return fullpath.lstat().st_size
@@ -233,7 +234,7 @@ class DeathRow:
pub_record.component_name,
pub_record.pool_name,
pub_record.pool_version,
- pub_file.libraryfile.filename,
+ pub_file,
)
file_path = str(self.diskpool.pathFor(*pub_file_details))
@@ -268,11 +269,11 @@ class DeathRow:
"Removing %s files marked for reaping" % len(condemned_files))
for condemned_file in sorted(condemned_files, reverse=True):
- component_name, pool_name, pool_version, file_name = (
+ component_name, pool_name, pool_version, pub_file = (
details[condemned_file])
try:
bytes += self._removeFile(
- component_name, pool_name, pool_version, file_name)
+ component_name, pool_name, pool_version, pub_file)
except NotInPool as info:
# It's safe for us to let this slide because it means that
# the file is already gone.
diff --git a/lib/lp/archivepublisher/diskpool.py b/lib/lp/archivepublisher/diskpool.py
index 01ce763..84b7874 100644
--- a/lib/lp/archivepublisher/diskpool.py
+++ b/lib/lp/archivepublisher/diskpool.py
@@ -139,14 +139,15 @@ class DiskPoolEntry:
require manual removal after further investigation.
"""
def __init__(self, archive: IArchive, rootpath: Path, temppath: Path,
- source_name: str, source_version: str, filename: str,
+ source_name: str, source_version: str,
+ pub_file: IPackageReleaseFile,
logger: logging.Logger) -> None:
self.archive = archive
self.rootpath = rootpath
self.temppath = temppath
self.source_name = source_name
self.source_version = source_version
- self.filename = filename
+ self.pub_file = pub_file
self.logger = logger
self.file_component = None
@@ -169,7 +170,7 @@ class DiskPoolEntry:
"""Return the path for this file in the given component."""
return (
self.rootpath / poolify(self.source_name, component) /
- self.filename)
+ self.pub_file.libraryfile.filename)
def preferredComponent(self, add: Optional[str] = None,
remove: Optional[str] = None) -> Optional[str]:
@@ -199,13 +200,13 @@ class DiskPoolEntry:
targetpath = self.pathFor(self.file_component)
return sha1_from_path(str(targetpath))
- def addFile(self, component: str, pub_file: IPackageReleaseFile):
+ def addFile(self, component: str):
"""See DiskPool.addFile."""
assert component in HARDCODED_COMPONENT_ORDER
targetpath = self.pathFor(component)
targetpath.parent.mkdir(parents=True, exist_ok=True)
- lfa = pub_file.libraryfile
+ lfa = self.pub_file.libraryfile
if self.file_component:
# There's something on disk. Check hash.
@@ -234,7 +235,7 @@ class DiskPoolEntry:
assert not targetpath.exists()
self.debug("Making new file in %s for %s/%s" %
- (component, self.source_name, self.filename))
+ (component, self.source_name, lfa.filename))
file_to_write = _diskpool_atomicfile(
targetpath, "wb", rootpath=self.temppath)
@@ -254,16 +255,17 @@ class DiskPoolEntry:
3) Remove the main file and there are symlinks left.
"""
+ filename = self.pub_file.libraryfile.filename
if not self.file_component:
raise NotInPool(
"File for removing %s %s/%s is not in pool, skipping." %
- (component, self.source_name, self.filename))
+ (component, self.source_name, filename))
# Okay, it's there, if it's a symlink then we need to remove
# it simply.
if component in self.symlink_components:
self.debug("Removing %s %s/%s as it is a symlink"
- % (component, self.source_name, self.filename))
+ % (component, self.source_name, filename))
# ensure we are removing a symbolic link and
# it is published in one or more components
link_path = self.pathFor(component)
@@ -273,13 +275,13 @@ class DiskPoolEntry:
if component != self.file_component:
raise MissingSymlinkInPool(
"Symlink for %s/%s in %s is missing, skipping." %
- (self.source_name, self.filename, component))
+ (self.source_name, filename, component))
# It's not a symlink, this means we need to check whether we
# have symlinks or not.
if len(self.symlink_components) == 0:
self.debug("Removing %s/%s from %s" %
- (self.source_name, self.filename, component))
+ (self.source_name, filename, component))
else:
# The target for removal is the real file, and there are symlinks
# pointing to it. In order to avoid breakage, we need to first
@@ -318,13 +320,14 @@ class DiskPoolEntry:
# We're already in the right place.
return
+ filename = self.pub_file.libraryfile.filename
if targetcomponent not in self.symlink_components:
raise ValueError(
"Target component '%s' is not a symlink for %s" %
- (targetcomponent, self.filename))
+ (targetcomponent, filename))
self.debug("Shuffling symlinks so primary for %s is in %s" %
- (self.filename, targetcomponent))
+ (filename, targetcomponent))
# Okay, so first up, we unlink the targetcomponent symlink.
targetpath = self.pathFor(targetcomponent)
@@ -403,32 +406,27 @@ class DiskPool:
self.logger = logger
def _getEntry(self, source_name: str, source_version: str,
- file: str) -> DiskPoolEntry:
+ pub_file: IPackageReleaseFile) -> DiskPoolEntry:
"""Return a new DiskPoolEntry for the given source and file."""
return DiskPoolEntry(
self.archive, self.rootpath, self.temppath, source_name,
- source_version, file, self.logger)
+ source_version, pub_file, self.logger)
def pathFor(self, comp: str, source_name: str, source_version: str,
+ pub_file: Optional[IPackageReleaseFile] = None,
file: Optional[str] = None) -> Path:
- """Return the path for the given pool folder or file.
-
- If file is none, the path to the folder containing all packages
- for the given component and source package will be returned.
-
- If file is specified, the path to the specific package file will
- be returned.
- """
- path = self.rootpath / poolify(source_name, comp)
- if file:
- path = path / file
- return path
+ """Return the path for the given pool file."""
+        if file is None and pub_file is None:
+            raise AssertionError("Must pass either pub_file or file")
+        if file is None:
+            file = pub_file.libraryfile.filename
+ return self.rootpath / poolify(source_name, comp) / file
def addFile(self, component: str, source_name: str, source_version: str,
- filename: str, pub_file: IPackageReleaseFile):
+ pub_file: IPackageReleaseFile):
"""Add a file with the given contents to the pool.
- `component`, `source_name`, `source_version`, and `filename` are
+ `component`, `source_name`, `source_version`, and `pub_file` are
used to calculate the on-disk location.
pub_file is an `IPackageReleaseFile` providing the file's contents
@@ -455,11 +453,11 @@ class DiskPool:
either as a file or a symlink, and the hash check passes,
results.NONE will be returned and nothing will be done.
"""
- entry = self._getEntry(source_name, source_version, filename)
- return entry.addFile(component, pub_file)
+ entry = self._getEntry(source_name, source_version, pub_file)
+ return entry.addFile(component)
def removeFile(self, component: str, source_name: str, source_version: str,
- filename: str) -> int:
+ pub_file: IPackageReleaseFile) -> int:
"""Remove the specified file from the pool.
There are three possible outcomes:
@@ -474,5 +472,5 @@ class DiskPool:
will be deleted, and the file will be moved to replace it. The
size of the deleted symlink will be returned.
"""
- entry = self._getEntry(source_name, source_version, filename)
+ entry = self._getEntry(source_name, source_version, pub_file)
return entry.removeFile(component)
diff --git a/lib/lp/archivepublisher/model/ftparchive.py b/lib/lp/archivepublisher/model/ftparchive.py
index 7e94441..5ab2ef5 100644
--- a/lib/lp/archivepublisher/model/ftparchive.py
+++ b/lib/lp/archivepublisher/model/ftparchive.py
@@ -701,7 +701,7 @@ class FTPArchiveHandler:
# involved here; so we just pass None as the version.
ondiskname = str(
self._diskpool.pathFor(
- component, sourcepackagename, None, filename))
+ component, sourcepackagename, None, file=filename))
if architecturetag is None:
architecturetag = "source"
filelist[component][architecturetag].append(ondiskname)
diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
index b0ddd32..18f2f48 100644
--- a/lib/lp/archivepublisher/publishing.py
+++ b/lib/lp/archivepublisher/publishing.py
@@ -27,6 +27,7 @@ from itertools import (
import lzma
from operator import attrgetter
import os
+import re
import shutil
from debian.deb822 import (
@@ -64,6 +65,7 @@ from lp.registry.interfaces.pocket import (
)
from lp.registry.interfaces.series import SeriesStatus
from lp.registry.model.distroseries import DistroSeries
+from lp.services.database.bulk import load
from lp.services.database.constants import UTC_NOW
from lp.services.database.interfaces import IStore
from lp.services.features import getFeatureFlag
@@ -87,11 +89,13 @@ from lp.soyuz.interfaces.publishing import (
active_publishing_status,
IPublishingSet,
)
+from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
from lp.soyuz.model.distroarchseries import DistroArchSeries
from lp.soyuz.model.publishing import (
BinaryPackagePublishingHistory,
SourcePackagePublishingHistory,
)
+from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
# Use this as the lock file name for all scripts that may manipulate
@@ -735,20 +739,26 @@ class Publisher:
# suite/channel. A more complete overhaul of the
# publisher/dominator might be able to deal with this.
publishing_set = getUtility(IPublishingSet)
+ releases_by_id = {}
pubs_by_id = defaultdict(list)
spphs_by_spr = defaultdict(list)
bpphs_by_bpr = defaultdict(list)
for spph in publishing_set.getSourcesForPublishing(
archive=self.archive):
spphs_by_spr[spph.sourcepackagereleaseID].append(spph)
- pubs_by_id["source:%d" % spph.sourcepackagereleaseID].append(spph)
+ release_id = "source:%d" % spph.sourcepackagereleaseID
+ releases_by_id.setdefault(release_id, spph.sourcepackagerelease)
+ pubs_by_id[release_id].append(spph)
for bpph in publishing_set.getBinariesForPublishing(
archive=self.archive):
bpphs_by_bpr[bpph.binarypackagereleaseID].append(bpph)
- pubs_by_id["binary:%d" % bpph.binarypackagereleaseID].append(bpph)
+ release_id = "binary:%d" % bpph.binarypackagereleaseID
+ releases_by_id.setdefault(release_id, bpph.binarypackagerelease)
+ pubs_by_id[release_id].append(bpph)
artifacts = self._diskpool.getAllArtifacts(
self.archive.name, self.archive.repository_format)
+ plan = []
for path, properties in sorted(artifacts.items()):
release_id = properties.get("launchpad.release-id")
source_name = properties.get("launchpad.source-name")
@@ -756,9 +766,35 @@ class Publisher:
if not release_id or not source_name or not source_version:
# Skip any files that Launchpad didn't put in Artifactory.
continue
+ plan.append(
+ (source_name[0], source_version[0], release_id[0], properties))
+
+ # Releases that have been removed may still have corresponding
+ # artifacts but no corresponding publishing history rows. Bulk-load
+ # any of these that we find so that we can track down the
+ # corresponding pool entries.
+ missing_sources = set()
+ missing_binaries = set()
+ for _, _, release_id, _ in plan:
+ if release_id in releases_by_id:
+ continue
+ match = re.match(r"^source:(\d+)$", release_id)
+ if match is not None:
+ missing_sources.add(int(match.group(1)))
+ else:
+ match = re.match(r"^binary:(\d+)$", release_id)
+ if match is not None:
+ missing_binaries.add(int(match.group(1)))
+ for spr in load(SourcePackageRelease, missing_sources):
+ releases_by_id["source:%d" % spr.id] = spr
+ for bpr in load(BinaryPackageRelease, missing_binaries):
+ releases_by_id["binary:%d" % bpr.id] = bpr
+
+ for source_name, source_version, release_id, properties in plan:
self._diskpool.updateProperties(
- source_name[0], source_version[0], path.name,
- pubs_by_id.get(release_id[0]), old_properties=properties)
+ source_name, source_version,
+ releases_by_id[release_id].files[0],
+ pubs_by_id.get(release_id), old_properties=properties)
def D_writeReleaseFiles(self, is_careful):
"""Write out the Release files for the provided distribution.
diff --git a/lib/lp/archivepublisher/tests/deathrow.txt b/lib/lp/archivepublisher/tests/deathrow.txt
index e85eece..e239c7c 100644
--- a/lib/lp/archivepublisher/tests/deathrow.txt
+++ b/lib/lp/archivepublisher/tests/deathrow.txt
@@ -209,7 +209,7 @@ Publish files on disk and build a list of all created file paths
... pub.component.name,
... pub.pool_name,
... pub.pool_version,
- ... pub_file.libraryfile.filename
+ ... pub_file,
... )
... unique_file_paths.add(file_path)
... pub.publish(quiet_disk_pool, BufferLogger())
diff --git a/lib/lp/archivepublisher/tests/test_artifactory.py b/lib/lp/archivepublisher/tests/test_artifactory.py
index 6fe9ad1..16c2fe6 100644
--- a/lib/lp/archivepublisher/tests/test_artifactory.py
+++ b/lib/lp/archivepublisher/tests/test_artifactory.py
@@ -15,6 +15,7 @@ from lp.archivepublisher.tests.artifactory_fixture import (
)
from lp.archivepublisher.tests.test_pool import (
FakeArchive,
+ FakePackageReleaseFile,
FakeReleaseType,
PoolTestingFile,
)
@@ -68,7 +69,7 @@ class ArtifactoryPoolTestingFile(PoolTestingFile):
def getProperties(self):
path = self.pool.pathFor(
- None, self.source_name, self.source_version, self.filename)
+ None, self.source_name, self.source_version, self.pub_file)
return path.properties
@@ -91,35 +92,34 @@ class TestArtifactoryPool(TestCase):
return ArtifactoryPool(
FakeArchive(repository_format), root_url, BufferLogger())
- def test_pathFor_debian_without_file(self):
- pool = self.makePool()
- self.assertEqual(
- ArtifactoryPath(
- "https://foo.example.com/artifactory/repository/pool/f/foo"),
- pool.pathFor(None, "foo", "1.0"))
-
def test_pathFor_debian_with_file(self):
pool = self.makePool()
+ pub_file = FakePackageReleaseFile(b"foo", "foo-1.0.deb")
self.assertEqual(
ArtifactoryPath(
"https://foo.example.com/artifactory/repository/pool/f/foo/"
"foo-1.0.deb"),
- pool.pathFor(None, "foo", "1.0", "foo-1.0.deb"))
-
- def test_pathFor_python_without_file(self):
- pool = self.makePool(ArchiveRepositoryFormat.PYTHON)
- self.assertEqual(
- ArtifactoryPath(
- "https://foo.example.com/artifactory/repository/foo/1.0"),
- pool.pathFor(None, "foo", "1.0"))
+ pool.pathFor(None, "foo", "1.0", pub_file))
def test_pathFor_python_with_file(self):
pool = self.makePool(ArchiveRepositoryFormat.PYTHON)
+ pub_file = FakePackageReleaseFile(b"foo", "foo-1.0.whl")
self.assertEqual(
ArtifactoryPath(
"https://foo.example.com/artifactory/repository/foo/1.0/"
"foo-1.0.whl"),
- pool.pathFor(None, "foo", "1.0", "foo-1.0.whl"))
+ pool.pathFor(None, "foo", "1.0", pub_file))
+
+ def test_pathFor_conda_with_file(self):
+ pool = self.makePool(ArchiveRepositoryFormat.CONDA)
+ pub_file = FakePackageReleaseFile(
+ b"foo", "foo-1.0.tar.bz2",
+ user_defined_fields=[("subdir", "linux-64")])
+ self.assertEqual(
+ ArtifactoryPath(
+ "https://foo.example.com/artifactory/repository/linux-64/"
+ "foo-1.0.tar.bz2"),
+ pool.pathFor(None, "foo", "1.0", pub_file))
def test_addFile(self):
pool = self.makePool()
@@ -159,7 +159,7 @@ class TestArtifactoryPool(TestCase):
release_id=1)
foo.addToPool()
self.assertTrue(foo.checkIsFile())
- foo.contents = b"different"
+ foo.pub_file.libraryfile.contents = b"different"
self.assertRaises(PoolFileOverwriteError, foo.addToPool)
def test_removeFile(self):
@@ -303,8 +303,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
spphs.append(spph.copyTo(
dses[1], PackagePublishingPocket.RELEASE, pool.archive))
transaction.commit()
- pool.addFile(
- None, spr.name, spr.version, sprf.libraryfile.filename, sprf)
+ pool.addFile(None, spr.name, spr.version, sprf)
path = pool.rootpath / "f" / "foo" / "foo_1.0.dsc"
self.assertTrue(path.exists())
self.assertFalse(path.is_symlink())
@@ -315,8 +314,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
"launchpad.source-version": ["1.0"],
},
path.properties)
- pool.updateProperties(
- spr.name, spr.version, sprf.libraryfile.filename, spphs)
+ pool.updateProperties(spr.name, spr.version, sprf, spphs)
self.assertEqual(
{
"launchpad.release-id": ["source:%d" % spr.id],
@@ -356,8 +354,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
dses[1], PackagePublishingPocket.RELEASE, pool.archive)[0])
transaction.commit()
pool.addFile(
- None, bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpf)
+ None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
path = (
pool.rootpath / "f" / "foo" / ("foo_1.0_%s.deb" % processor.name))
self.assertTrue(path.exists())
@@ -370,8 +367,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
},
path.properties)
pool.updateProperties(
- bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpphs)
+ bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
self.assertEqual(
{
"launchpad.release-id": ["binary:%d" % bpr.id],
@@ -408,8 +404,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
{bpr: (bpr.component, bpr.section, bpr.priority, None)})
transaction.commit()
pool.addFile(
- None, bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpf)
+ None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
path = pool.rootpath / "f" / "foo" / "foo_1.0_all.deb"
self.assertTrue(path.exists())
self.assertFalse(path.is_symlink())
@@ -421,8 +416,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
},
path.properties)
pool.updateProperties(
- bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpphs)
+ bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
self.assertEqual(
{
"launchpad.release-id": ["binary:%d" % bpr.id],
@@ -456,8 +450,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
spphs.append(spph.copyTo(
dses[1], PackagePublishingPocket.RELEASE, pool.archive))
transaction.commit()
- pool.addFile(
- None, spr.name, spr.version, sprf.libraryfile.filename, sprf)
+ pool.addFile(None, spr.name, spr.version, sprf)
path = pool.rootpath / "foo" / "1.0" / "foo-1.0.tar.gz"
self.assertTrue(path.exists())
self.assertFalse(path.is_symlink())
@@ -468,8 +461,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
"launchpad.source-version": ["1.0"],
},
path.properties)
- pool.updateProperties(
- spr.name, spr.version, sprf.libraryfile.filename, spphs)
+ pool.updateProperties(spr.name, spr.version, sprf, spphs)
self.assertEqual(
{
"launchpad.release-id": ["source:%d" % spr.id],
@@ -513,8 +505,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
channel="edge")[0])
transaction.commit()
pool.addFile(
- None, bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpf)
+ None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
path = pool.rootpath / "foo" / "1.0" / "foo-1.0-py3-none-any.whl"
self.assertTrue(path.exists())
self.assertFalse(path.is_symlink())
@@ -526,8 +517,115 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
},
path.properties)
pool.updateProperties(
- bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpphs)
+ bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
+ self.assertEqual(
+ {
+ "launchpad.release-id": ["binary:%d" % bpr.id],
+ "launchpad.source-name": ["foo"],
+ "launchpad.source-version": ["1.0"],
+ "launchpad.channel": list(
+ sorted("%s:edge" % ds.name for ds in dses)),
+ },
+ path.properties)
+
+ def test_updateProperties_conda_v1(self):
+ pool = self.makePool(ArchiveRepositoryFormat.CONDA)
+ dses = [
+ self.factory.makeDistroSeries(
+ distribution=pool.archive.distribution)
+ for _ in range(2)]
+ processor = self.factory.makeProcessor()
+ dases = [
+ self.factory.makeDistroArchSeries(
+ distroseries=ds, architecturetag=processor.name)
+ for ds in dses]
+ ci_build = self.factory.makeCIBuild(distro_arch_series=dases[0])
+ bpn = self.factory.makeBinaryPackageName(name="foo")
+ bpr = self.factory.makeBinaryPackageRelease(
+ binarypackagename=bpn, version="1.0", ci_build=ci_build,
+ binpackageformat=BinaryPackageFormat.CONDA_V1,
+ user_defined_fields=[("subdir", "linux-64")])
+ bpf = self.factory.makeBinaryPackageFile(
+ binarypackagerelease=bpr,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="foo-1.0.tar.bz2"),
+ filetype=BinaryPackageFileType.CONDA_V1)
+ bpph = self.factory.makeBinaryPackagePublishingHistory(
+ binarypackagerelease=bpr, archive=pool.archive,
+ distroarchseries=dases[0], pocket=PackagePublishingPocket.RELEASE,
+ architecturespecific=False, channel="edge")
+ bpphs = [bpph]
+ bpphs.append(
+ getUtility(IPublishingSet).copyBinaries(
+ pool.archive, dses[1], PackagePublishingPocket.RELEASE, [bpph],
+ channel="edge")[0])
+ transaction.commit()
+ pool.addFile(None, bpph.pool_name, bpph.pool_version, bpf)
+ path = pool.rootpath / "linux-64" / "foo-1.0.tar.bz2"
+ self.assertTrue(path.exists())
+ self.assertFalse(path.is_symlink())
+ self.assertEqual(
+ {
+ "launchpad.release-id": ["binary:%d" % bpr.id],
+ "launchpad.source-name": ["foo"],
+ "launchpad.source-version": ["1.0"],
+ },
+ path.properties)
+ pool.updateProperties(bpph.pool_name, bpph.pool_version, bpf, bpphs)
+ self.assertEqual(
+ {
+ "launchpad.release-id": ["binary:%d" % bpr.id],
+ "launchpad.source-name": ["foo"],
+ "launchpad.source-version": ["1.0"],
+ "launchpad.channel": list(
+ sorted("%s:edge" % ds.name for ds in dses)),
+ },
+ path.properties)
+
+ def test_updateProperties_conda_v2(self):
+ pool = self.makePool(ArchiveRepositoryFormat.CONDA)
+ dses = [
+ self.factory.makeDistroSeries(
+ distribution=pool.archive.distribution)
+ for _ in range(2)]
+ processor = self.factory.makeProcessor()
+ dases = [
+ self.factory.makeDistroArchSeries(
+ distroseries=ds, architecturetag=processor.name)
+ for ds in dses]
+ ci_build = self.factory.makeCIBuild(distro_arch_series=dases[0])
+ bpn = self.factory.makeBinaryPackageName(name="foo")
+ bpr = self.factory.makeBinaryPackageRelease(
+ binarypackagename=bpn, version="1.0", ci_build=ci_build,
+ binpackageformat=BinaryPackageFormat.CONDA_V2,
+ user_defined_fields=[("subdir", "noarch")])
+ bpf = self.factory.makeBinaryPackageFile(
+ binarypackagerelease=bpr,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="foo-1.0.conda"),
+ filetype=BinaryPackageFileType.CONDA_V2)
+ bpph = self.factory.makeBinaryPackagePublishingHistory(
+ binarypackagerelease=bpr, archive=pool.archive,
+ distroarchseries=dases[0], pocket=PackagePublishingPocket.RELEASE,
+ architecturespecific=True, channel="edge")
+ bpphs = [bpph]
+ bpphs.append(
+ getUtility(IPublishingSet).copyBinaries(
+ pool.archive, dses[1], PackagePublishingPocket.RELEASE, [bpph],
+ channel="edge")[0])
+ transaction.commit()
+ pool.addFile(None, bpph.pool_name, bpph.pool_version, bpf)
+ path = pool.rootpath / "noarch" / "foo-1.0.conda"
+ self.assertTrue(path.exists())
+ self.assertFalse(path.is_symlink())
+ self.assertEqual(
+ {
+ "launchpad.release-id": ["binary:%d" % bpr.id],
+ "launchpad.source-name": ["foo"],
+ "launchpad.source-version": ["1.0"],
+ },
+ path.properties)
+ pool.updateProperties(bpph.pool_name, bpph.pool_version, bpf, bpphs)
self.assertEqual(
{
"launchpad.release-id": ["binary:%d" % bpr.id],
@@ -563,8 +661,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
{bpr: (bpr.component, bpr.section, bpr.priority, None)})
transaction.commit()
pool.addFile(
- None, bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpf)
+ None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
path = pool.rootpath / "f" / "foo" / "foo_1.0_all.deb"
path.set_properties({"deb.version": ["1.0"]}, recursive=False)
self.assertEqual(
@@ -576,8 +673,7 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
},
path.properties)
pool.updateProperties(
- bpr.sourcepackagename, bpr.sourcepackageversion,
- bpf.libraryfile.filename, bpphs)
+ bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
self.assertEqual(
{
"launchpad.release-id": ["binary:%d" % bpr.id],
diff --git a/lib/lp/archivepublisher/tests/test_deathrow.py b/lib/lp/archivepublisher/tests/test_deathrow.py
index 0bfb423..e7c82f1 100644
--- a/lib/lp/archivepublisher/tests/test_deathrow.py
+++ b/lib/lp/archivepublisher/tests/test_deathrow.py
@@ -54,7 +54,7 @@ class TestDeathRow(TestCase):
pub.component.name,
pub.pool_name,
pub.pool_version,
- pub_file.libraryfile.filename)
+ pub_file)
def assertIsFile(self, path: Path) -> None:
"""Assert the path exists and is a regular file."""
diff --git a/lib/lp/archivepublisher/tests/test_ftparchive.py b/lib/lp/archivepublisher/tests/test_ftparchive.py
index 9b407a7..4528b15 100755
--- a/lib/lp/archivepublisher/tests/test_ftparchive.py
+++ b/lib/lp/archivepublisher/tests/test_ftparchive.py
@@ -151,7 +151,7 @@ class TestFTPArchive(TestCaseWithFactory):
leafname, samplename=None):
"""Create a repository file."""
fullpath = self._dp.pathFor(
- component, source_name, source_version, leafname)
+ component, source_name, source_version, file=leafname)
fullpath.parent.mkdir(parents=True, exist_ok=True)
if samplename is None:
samplename = leafname
@@ -791,7 +791,7 @@ class TestFTPArchive(TestCaseWithFactory):
# something to do.
for i in range(49):
self._dp.pathFor(
- "main", "bin%d" % i, "1", "bin%d_1_i386.deb" % i).unlink()
+ "main", "bin%d" % i, "1", file="bin%d_1_i386.deb" % i).unlink()
cache_path = os.path.join(self._config.cacheroot, "packages-i386.db")
old_cache_size = os.stat(cache_path).st_size
diff --git a/lib/lp/archivepublisher/tests/test_pool.py b/lib/lp/archivepublisher/tests/test_pool.py
index 97669ff..7ede0f4 100644
--- a/lib/lp/archivepublisher/tests/test_pool.py
+++ b/lib/lp/archivepublisher/tests/test_pool.py
@@ -45,9 +45,13 @@ class FakeLibraryFileContent:
class FakeLibraryFileAlias:
- def __init__(self, contents):
- self.content = FakeLibraryFileContent(contents)
+ def __init__(self, contents, filename):
self.contents = contents
+ self.filename = filename
+
+ @property
+ def content(self):
+ return FakeLibraryFileContent(self.contents)
def open(self):
self.loc = 0
@@ -62,6 +66,13 @@ class FakeLibraryFileAlias:
pass
+class FakePackageRelease:
+
+ def __init__(self, release_id, user_defined_fields=None):
+ self.id = release_id
+ self.user_defined_fields = user_defined_fields
+
+
class FakeReleaseType(EnumeratedType):
SOURCE = Item("Source")
@@ -71,13 +82,18 @@ class FakeReleaseType(EnumeratedType):
@implementer(IPackageReleaseFile)
class FakePackageReleaseFile:
- def __init__(self, contents, release_type, release_id):
- self.libraryfile = FakeLibraryFileAlias(contents)
+ def __init__(self, contents, filename, release_type=FakeReleaseType.BINARY,
+ release_id=1, user_defined_fields=None):
+ self.libraryfile = FakeLibraryFileAlias(contents, filename)
if release_type == FakeReleaseType.SOURCE:
self.sourcepackagereleaseID = release_id
+ self.sourcepackagerelease = FakePackageRelease(
+ release_id, user_defined_fields=user_defined_fields)
alsoProvides(self, ISourcePackageReleaseFile)
elif release_type == FakeReleaseType.BINARY:
self.binarypackagereleaseID = release_id
+ self.binarypackagerelease = FakePackageRelease(
+ release_id, user_defined_fields=user_defined_fields)
alsoProvides(self, IBinaryPackageFile)
@@ -88,29 +104,26 @@ class PoolTestingFile:
self.pool = pool
self.source_name = source_name
self.source_version = source_version
- self.filename = filename
- self.contents = source_name.encode("UTF-8")
- self.release_type = release_type
- self.release_id = release_id
+ self.pub_file = FakePackageReleaseFile(
+ source_name.encode(), filename, release_type=release_type,
+ release_id=release_id)
def addToPool(self, component: str):
return self.pool.addFile(
- component, self.source_name, self.source_version, self.filename,
- FakePackageReleaseFile(
- self.contents, self.release_type, self.release_id))
+ component, self.source_name, self.source_version, self.pub_file)
def removeFromPool(self, component: str) -> int:
return self.pool.removeFile(
- component, self.source_name, self.source_version, self.filename)
+ component, self.source_name, self.source_version, self.pub_file)
def checkExists(self, component: str) -> bool:
path = self.pool.pathFor(
- component, self.source_name, self.source_version, self.filename)
+ component, self.source_name, self.source_version, self.pub_file)
return path.exists()
def checkIsLink(self, component: str) -> bool:
path = self.pool.pathFor(
- component, self.source_name, self.source_version, self.filename)
+ component, self.source_name, self.source_version, self.pub_file)
return path.is_symlink()
def checkIsFile(self, component: str) -> bool:
diff --git a/lib/lp/code/interfaces/cibuild.py b/lib/lp/code/interfaces/cibuild.py
index fc7a197..484aa4e 100644
--- a/lib/lp/code/interfaces/cibuild.py
+++ b/lib/lp/code/interfaces/cibuild.py
@@ -179,7 +179,8 @@ class ICIBuildView(IPackageBuildView, IPrivacy):
def createBinaryPackageRelease(
binarypackagename, version, summary, description, binpackageformat,
- architecturespecific, installedsize=None, homepage=None):
+ architecturespecific, installedsize=None, homepage=None,
+ user_defined_fields=None):
"""Create and return a `BinaryPackageRelease` for this CI build.
The new binary package release will be linked to this build.
diff --git a/lib/lp/code/model/cibuild.py b/lib/lp/code/model/cibuild.py
index 4e1ddbd..7c9248c 100644
--- a/lib/lp/code/model/cibuild.py
+++ b/lib/lp/code/model/cibuild.py
@@ -469,14 +469,15 @@ class CIBuild(PackageBuildMixin, StormBase):
def createBinaryPackageRelease(
self, binarypackagename, version, summary, description,
binpackageformat, architecturespecific, installedsize=None,
- homepage=None):
+ homepage=None, user_defined_fields=None):
"""See `ICIBuild`."""
return BinaryPackageRelease(
ci_build=self, binarypackagename=binarypackagename,
version=version, summary=summary, description=description,
binpackageformat=binpackageformat,
architecturespecific=architecturespecific,
- installedsize=installedsize, homepage=homepage)
+ installedsize=installedsize, homepage=homepage,
+ user_defined_fields=user_defined_fields)
@implementer(ICIBuildSet)
diff --git a/lib/lp/soyuz/model/archivejob.py b/lib/lp/soyuz/model/archivejob.py
index cc9b75b..9e559f8 100644
--- a/lib/lp/soyuz/model/archivejob.py
+++ b/lib/lp/soyuz/model/archivejob.py
@@ -266,6 +266,11 @@ class CIBuildUploadJob(ArchiveJobDerived):
"description": about.get("description", ""),
"architecturespecific": index["platform"] is not None,
"homepage": about.get("home", ""),
+ # We should perhaps model this explicitly since it's used by the
+ # publisher, but this gives us an easy way to pass this through
+ # without needing to add a column to a large table that's only
+ # relevant to a tiny minority of rows.
+ "user_defined_fields": [("subdir", index["subdir"])],
}
def _scanFile(self, path):
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 7ae3765..fb1cba2 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -182,12 +182,11 @@ class ArchivePublisherBase:
pool_version = self.pool_version
component = (
None if self.component is None else self.component.name)
- filename = pub_file.libraryfile.filename
path = diskpool.pathFor(
- component, pool_name, pool_version, filename)
+ component, pool_name, pool_version, pub_file)
action = diskpool.addFile(
- component, pool_name, pool_version, filename, pub_file)
+ component, pool_name, pool_version, pub_file)
if action == diskpool.results.FILE_ADDED:
log.debug("Added %s from library" % path)
elif action == diskpool.results.SYMLINK_ADDED:
diff --git a/lib/lp/soyuz/tests/test_archivejob.py b/lib/lp/soyuz/tests/test_archivejob.py
index d599c48..db27d4a 100644
--- a/lib/lp/soyuz/tests/test_archivejob.py
+++ b/lib/lp/soyuz/tests/test_archivejob.py
@@ -248,6 +248,7 @@ class TestCIBuildUploadJob(TestCaseWithFactory):
"binpackageformat": BinaryPackageFormat.CONDA_V1,
"architecturespecific": False,
"homepage": "",
+ "user_defined_fields": [("subdir", "noarch")],
}
self.assertEqual(expected, job._scanFile(datadir(path)))
@@ -268,6 +269,7 @@ class TestCIBuildUploadJob(TestCaseWithFactory):
"binpackageformat": BinaryPackageFormat.CONDA_V1,
"architecturespecific": True,
"homepage": "http://example.com/",
+ "user_defined_fields": [("subdir", "linux-64")],
}
self.assertEqual(expected, job._scanFile(datadir(path)))
@@ -288,6 +290,7 @@ class TestCIBuildUploadJob(TestCaseWithFactory):
"binpackageformat": BinaryPackageFormat.CONDA_V2,
"architecturespecific": False,
"homepage": "",
+ "user_defined_fields": [("subdir", "noarch")],
}
self.assertEqual(expected, job._scanFile(datadir(path)))
@@ -308,6 +311,7 @@ class TestCIBuildUploadJob(TestCaseWithFactory):
"binpackageformat": BinaryPackageFormat.CONDA_V2,
"architecturespecific": True,
"homepage": "http://example.com/",
+ "user_defined_fields": [("subdir", "linux-64")],
}
self.assertEqual(expected, job._scanFile(datadir(path)))
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 6a5e459..2a5c160 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -4274,7 +4274,7 @@ class BareLaunchpadObjectFactory(ObjectFactory):
enhances=None, breaks=None,
essential=False, installed_size=None,
date_created=None, debug_package=None,
- homepage=None):
+ homepage=None, user_defined_fields=None):
"""Make a `BinaryPackageRelease`."""
if build is None and ci_build is None:
build = self.makeBinaryPackageBuild()
@@ -4314,6 +4314,7 @@ class BareLaunchpadObjectFactory(ObjectFactory):
"architecturespecific": architecturespecific,
"installedsize": installed_size,
"homepage": homepage,
+ "user_defined_fields": user_defined_fields,
}
if build is not None:
kwargs.update({