launchpad-reviewers team mailing list archive
Message #29316
[Merge] ~cjwatson/launchpad:archive-translate-path-non-pool-multiple into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:archive-translate-path-non-pool-multiple into launchpad:master.
Commit message:
Tolerate multiple non-pool files with the same name
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/431673
This happens in practice when a file has been "condemned" (scheduled for deletion) but hasn't yet actually been deleted; publisher runs typically do this to old versions of archive metadata files when publishing new versions of them.
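
For reviewers skimming without the model code to hand, here is a minimal, self-contained sketch (plain Python, not the Storm query used by `ArchiveFileSet.getByArchive`) of the tri-state `condemned` filter and of how path translation can then pick the single live file when several rows share a path. The `ArchiveFile` dataclass, `get_by_archive`, and `translate_path` below are illustrative stand-ins under assumed names, not Launchpad APIs.

    from dataclasses import dataclass
    from datetime import datetime, timedelta, timezone
    from typing import Iterable, List, Optional

    @dataclass
    class ArchiveFile:
        # Illustrative stand-in for the real ArchiveFile row.
        path: str
        library_file_url: str
        scheduled_deletion_date: Optional[datetime] = None  # set => "condemned"

    def get_by_archive(
        files: Iterable[ArchiveFile],
        path: Optional[str] = None,
        condemned: Optional[bool] = None,
    ) -> List[ArchiveFile]:
        """Tri-state filter mirroring the new `condemned` parameter:
        True -> only condemned rows, False -> only live rows, None -> both."""
        result = []
        for f in files:
            if path is not None and f.path != path:
                continue
            if condemned is True and f.scheduled_deletion_date is None:
                continue
            if condemned is False and f.scheduled_deletion_date is not None:
                continue
            result.append(f)
        return result

    def translate_path(files: Iterable[ArchiveFile], path: str) -> Optional[str]:
        """Resolve a non-pool path to a file URL, ignoring condemned rows so a
        condemned-but-not-yet-deleted duplicate cannot produce multiple results."""
        live = get_by_archive(files, path=path, condemned=False)
        return live[0].library_file_url if live else None

    if __name__ == "__main__":
        now = datetime.now(timezone.utc)
        rows = [
            ArchiveFile("dists/focal/InRelease", "lfa/1",
                        scheduled_deletion_date=now + timedelta(days=1)),
            ArchiveFile("dists/focal/InRelease", "lfa/2"),
        ]
        assert translate_path(rows, "dists/focal/InRelease") == "lfa/2"

The same shape appears in the diff below: `getByArchive` grows a `condemned` keyword that can also exclude condemned rows, and the XML-RPC `translatePath` passes `condemned=False` so it never trips over an old, condemned copy of a metadata file.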
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:archive-translate-path-non-pool-multiple into launchpad:master.
diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
index f714c77..58d948a 100644
--- a/lib/lp/archivepublisher/publishing.py
+++ b/lib/lp/archivepublisher/publishing.py
@@ -1221,10 +1221,7 @@ class Publisher:
)
uncondemned_files = set()
for db_file in archive_file_set.getByArchive(
- self.archive,
- container=container,
- only_condemned=True,
- eager_load=True,
+ self.archive, container=container, condemned=True, eager_load=True
):
stripped_path = strip_dists(db_file.path)
if stripped_path in current_files:
diff --git a/lib/lp/soyuz/interfaces/archivefile.py b/lib/lp/soyuz/interfaces/archivefile.py
index b72c54e..e41548b 100644
--- a/lib/lp/soyuz/interfaces/archivefile.py
+++ b/lib/lp/soyuz/interfaces/archivefile.py
@@ -89,7 +89,7 @@ class IArchiveFileSet(Interface):
container=None,
path=None,
sha256=None,
- only_condemned=False,
+ condemned=None,
eager_load=False,
):
"""Get files in an archive.
@@ -101,8 +101,10 @@ class IArchiveFileSet(Interface):
directory is this path.
:param sha256: If not None, return only files with this SHA-256
checksum.
- :param only_condemned: If True, return only files with a
- scheduled_deletion_date set.
+ :param condemned: If True, return only files with a
+ scheduled_deletion_date set; if False, return only files without
+ a scheduled_deletion_date set; if None (the default), return
+ both.
:param eager_load: If True, preload related `LibraryFileAlias` and
`LibraryFileContent` rows.
:return: An iterable of matched files.
diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
index 933e6d7..fe4ba4c 100644
--- a/lib/lp/soyuz/model/archivefile.py
+++ b/lib/lp/soyuz/model/archivefile.py
@@ -101,7 +101,7 @@ class ArchiveFileSet:
path=None,
path_parent=None,
sha256=None,
- only_condemned=False,
+ condemned=None,
eager_load=False,
):
"""See `IArchiveFileSet`."""
@@ -126,8 +126,11 @@ class ArchiveFileSet:
LibraryFileContent.sha256 == sha256,
]
)
- if only_condemned:
- clauses.append(ArchiveFile.scheduled_deletion_date != None)
+ if condemned is not None:
+ if condemned:
+ clauses.append(ArchiveFile.scheduled_deletion_date != None)
+ else:
+ clauses.append(ArchiveFile.scheduled_deletion_date == None)
archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses)
def eager_load(rows):
diff --git a/lib/lp/soyuz/tests/test_archivefile.py b/lib/lp/soyuz/tests/test_archivefile.py
index ca295ad..6e8f819 100644
--- a/lib/lp/soyuz/tests/test_archivefile.py
+++ b/lib/lp/soyuz/tests/test_archivefile.py
@@ -92,7 +92,11 @@ class TestArchiveFile(TestCaseWithFactory):
)
self.assertContentEqual(
[archive_files[0]],
- archive_file_set.getByArchive(archives[0], only_condemned=True),
+ archive_file_set.getByArchive(archives[0], condemned=True),
+ )
+ self.assertContentEqual(
+ [archive_files[1]],
+ archive_file_set.getByArchive(archives[0], condemned=False),
)
self.assertContentEqual(
archive_files[2:], archive_file_set.getByArchive(archives[1])
@@ -115,7 +119,11 @@ class TestArchiveFile(TestCaseWithFactory):
)
self.assertContentEqual(
[archive_files[2]],
- archive_file_set.getByArchive(archives[1], only_condemned=True),
+ archive_file_set.getByArchive(archives[1], condemned=True),
+ )
+ self.assertContentEqual(
+ [archive_files[3]],
+ archive_file_set.getByArchive(archives[1], condemned=False),
)
self.assertContentEqual(
[archive_files[0]],
diff --git a/lib/lp/soyuz/xmlrpc/archive.py b/lib/lp/soyuz/xmlrpc/archive.py
index 7c78ab8..d5e5f69 100644
--- a/lib/lp/soyuz/xmlrpc/archive.py
+++ b/lib/lp/soyuz/xmlrpc/archive.py
@@ -162,7 +162,9 @@ class ArchiveAPI(LaunchpadXMLRPCView):
) -> Optional[str]:
archive_file = (
getUtility(IArchiveFileSet)
- .getByArchive(archive=archive, path=path.as_posix())
+ .getByArchive(
+ archive=archive, path=path.as_posix(), condemned=False
+ )
.one()
)
if archive_file is None:
diff --git a/lib/lp/soyuz/xmlrpc/tests/test_archive.py b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
index 39bb7c6..3d725ce 100644
--- a/lib/lp/soyuz/xmlrpc/tests/test_archive.py
+++ b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
@@ -3,6 +3,8 @@
"""Tests for the internal Soyuz archive API."""
+from datetime import timedelta
+
from fixtures import FakeLogger
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
@@ -13,6 +15,7 @@ from lp.services.features.testing import FeatureFixture
from lp.services.macaroons.interfaces import IMacaroonIssuer
from lp.soyuz.enums import ArchiveRepositoryFormat, PackagePublishingStatus
from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG
+from lp.soyuz.interfaces.archivefile import IArchiveFileSet
from lp.soyuz.xmlrpc.archive import ArchiveAPI
from lp.testing import TestCaseWithFactory, person_logged_in
from lp.testing.layers import LaunchpadFunctionalLayer
@@ -383,38 +386,48 @@ class TestArchiveAPI(TestCaseWithFactory):
def test_translatePath_non_pool_found(self):
archive = removeSecurityProxy(self.factory.makeArchive())
self.factory.makeArchiveFile(archive=archive)
- archive_file = self.factory.makeArchiveFile(archive=archive)
+ path = "dists/focal/InRelease"
+ archive_files = [
+ self.factory.makeArchiveFile(archive=archive, path=path)
+ for _ in range(2)
+ ]
+ getUtility(IArchiveFileSet).scheduleDeletion(
+ [archive_files[0]], timedelta(days=1)
+ )
self.assertEqual(
- archive_file.library_file.getURL(),
- self.archive_api.translatePath(
- archive.reference, archive_file.path
- ),
+ archive_files[1].library_file.getURL(),
+ self.archive_api.translatePath(archive.reference, path),
)
self.assertLogs(
"%s: %s (non-pool) -> LFA %d"
% (
archive.reference,
- archive_file.path,
- archive_file.library_file.id,
+ path,
+ archive_files[1].library_file.id,
)
)
def test_translatePath_non_pool_found_private(self):
archive = removeSecurityProxy(self.factory.makeArchive(private=True))
self.factory.makeArchiveFile(archive=archive)
- archive_file = self.factory.makeArchiveFile(archive=archive)
+ path = "dists/focal/InRelease"
+ archive_files = [
+ self.factory.makeArchiveFile(archive=archive, path=path)
+ for _ in range(2)
+ ]
+ getUtility(IArchiveFileSet).scheduleDeletion(
+ [archive_files[0]], timedelta(days=1)
+ )
self.assertStartsWith(
- archive_file.library_file.getURL() + "?token=",
- self.archive_api.translatePath(
- archive.reference, archive_file.path
- ),
+ archive_files[1].library_file.getURL() + "?token=",
+ self.archive_api.translatePath(archive.reference, path),
)
self.assertLogs(
"%s: %s (non-pool) -> LFA %d"
% (
archive.reference,
- archive_file.path,
- archive_file.library_file.id,
+ path,
+ archive_files[1].library_file.id,
)
)