launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #29624
[Merge] ~cjwatson/launchpad:archive-api-snapshots into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:archive-api-snapshots into launchpad:master with ~cjwatson/launchpad:archive-file-deeper-history as a prerequisite.
Commit message:
Add snapshot handling to ArchiveAPI.translatePath
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/436591
Now that we record enough history of archive files in the database, we can extend the XML-RPC API to allow querying it. This commit adds an optional `live_at` parameter to `ArchiveAPI.translatePath`; if given, the API will return files as they existed in the archive at that time.
We needed some corresponding additions to internal APIs:
* `Archive.getPoolFileByPath` handles pool files, which are simple: they either exist at a given time or they don't.
* `ArchiveFileSet.getByArchive` handles index files, where there are some subtleties because the same paths have different contents at different times. For the ordinary paths (e.g. `dists/jammy/InRelease`), `ArchiveFile` rows stop holding those paths once they've been superseded; but for the `by-hash` paths (e.g. `dists/jammy/by-hash/SHA256/<sha256>`), `ArchiveFile` rows are valid until they're marked as having been removed. We need separate `live_at` and `existed_at` parameters to capture this distinction.
--
Your team, Launchpad code reviewers, is requested to review the proposed merge of ~cjwatson/launchpad:archive-api-snapshots into launchpad:master.
diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py
index 37b81a7..ad9fa02 100644
--- a/lib/lp/soyuz/interfaces/archive.py
+++ b/lib/lp/soyuz/interfaces/archive.py
@@ -55,6 +55,8 @@ __all__ = [
import http.client
import re
import typing
+from datetime import datetime
+from pathlib import PurePath
from urllib.parse import urlparse
from lazr.restful.declarations import (
@@ -792,12 +794,16 @@ class IArchiveSubscriberView(Interface):
:return: A collection containing `BinaryPackagePublishingHistory`.
"""
- def getPoolFileByPath(path):
+ def getPoolFileByPath(
+ path: PurePath, live_at: typing.Optional[datetime] = None
+ ):
"""Return the `ILibraryFileAlias` for a path in this archive's pool.
:param path: A `PurePath` for where a source or binary package file
is published in this archive's pool, e.g.
"pool/main/p/package/package_1.dsc".
+ :param live_at: If not None, return only files that existed in the
+ archive at this `datetime`.
:return: An `ILibraryFileAlias`, or None.
"""
diff --git a/lib/lp/soyuz/interfaces/archiveapi.py b/lib/lp/soyuz/interfaces/archiveapi.py
index b8acecf..bc424b3 100644
--- a/lib/lp/soyuz/interfaces/archiveapi.py
+++ b/lib/lp/soyuz/interfaces/archiveapi.py
@@ -8,6 +8,9 @@ __all__ = [
"IArchiveApplication",
]
+from datetime import datetime
+from typing import Optional
+
from zope.interface import Interface
from lp.services.webapp.interfaces import ILaunchpadApplication
@@ -42,12 +45,16 @@ class IArchiveAPI(Interface):
None.
"""
- def translatePath(archive_reference, path):
+ def translatePath(
+ archive_reference: str, path: str, live_at: Optional[datetime] = None
+ ):
"""Find the librarian URL for a relative path within an archive.
:param archive_reference: The reference form of the archive to check.
:param path: The relative path within the archive. This should not
begin with a "/" character.
+ :param live_at: An optional timestamp; if passed, only return paths
+ that existed at this timestamp.
:return: A `NotFound` fault if `archive_reference` does not identify
an archive, or the archive's repository format is something
diff --git a/lib/lp/soyuz/interfaces/archivefile.py b/lib/lp/soyuz/interfaces/archivefile.py
index 6c530b3..3031d12 100644
--- a/lib/lp/soyuz/interfaces/archivefile.py
+++ b/lib/lp/soyuz/interfaces/archivefile.py
@@ -112,7 +112,8 @@ class IArchiveFileSet(Interface):
container=None,
path=None,
sha256=None,
- condemned=None,
+ live_at=None,
+ existed_at=None,
only_published=False,
eager_load=False,
):
@@ -125,14 +126,21 @@ class IArchiveFileSet(Interface):
directory is this path.
:param sha256: If not None, return only files with this SHA-256
checksum.
- :param condemned: If True, return only files with a
- scheduled_deletion_date set; if False, return only files without
- a scheduled_deletion_date set; if None (the default), return
- both.
+ :param live_at: If not None, return only files that held their path
+ in the archive at this `datetime` (or
+ `lp.services.database.constants.UTC_NOW`).
+ :param existed_at: If not None, return only files that existed in
+ the archive at this `datetime` (or
+ `lp.services.database.constants.UTC_NOW`). This includes files
+ that did not hold their path (e.g. `dists/jammy/InRelease`) and
+ that are merely still published in a `by-hash` directory; it
+ should normally be used together with `sha256`.
:param only_published: If True, return only files without a
`date_removed` set.
:param eager_load: If True, preload related `LibraryFileAlias` and
`LibraryFileContent` rows.
+ :raises IncompatibleArguments: if both `live_at` and `existed_at`
+ are specified.
:return: An iterable of matched files.
"""
diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py
index b74321f..867817a 100644
--- a/lib/lp/soyuz/model/archive.py
+++ b/lib/lp/soyuz/model/archive.py
@@ -13,6 +13,7 @@ __all__ = [
import re
import typing
+from datetime import datetime
from operator import attrgetter
from pathlib import PurePath
@@ -2027,7 +2028,7 @@ class Archive(SQLBase):
return archive_file
def getPoolFileByPath(
- self, path: PurePath
+ self, path: PurePath, live_at: typing.Optional[datetime] = None
) -> typing.Optional[LibraryFileAlias]:
"""See `IArchive`."""
try:
@@ -2080,10 +2081,21 @@ class Archive(SQLBase):
xPPH.archive == self,
xPPH.component == Component.id,
xPPH.datepublished != None,
- xPPH.dateremoved == None,
xPF.libraryfile == LibraryFileAlias.id,
]
)
+ if live_at:
+ clauses.extend(
+ [
+ xPPH.datepublished <= live_at,
+ Or(
+ xPPH.dateremoved == None,
+ xPPH.dateremoved > live_at,
+ ),
+ ]
+ )
+ else:
+ clauses.append(xPPH.dateremoved == None)
return (
store.find(LibraryFileAlias, *clauses)
.config(distinct=True)
diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
index 08a6bcf..b76cbab 100644
--- a/lib/lp/soyuz/model/archivefile.py
+++ b/lib/lp/soyuz/model/archivefile.py
@@ -12,10 +12,11 @@ import os.path
import re
import pytz
-from storm.locals import DateTime, Int, Reference, Storm, Unicode
+from storm.locals import DateTime, Int, Or, Reference, Storm, Unicode
from zope.component import getUtility
from zope.interface import implementer
+from lp.app.errors import IncompatibleArguments
from lp.services.database.bulk import load_related
from lp.services.database.constants import UTC_NOW
from lp.services.database.decoratedresultset import DecoratedResultSet
@@ -118,7 +119,8 @@ class ArchiveFileSet:
path=None,
path_parent=None,
sha256=None,
- condemned=None,
+ live_at=None,
+ existed_at=None,
only_published=False,
eager_load=False,
):
@@ -144,11 +146,42 @@ class ArchiveFileSet:
LibraryFileContent.sha256 == sha256,
]
)
- if condemned is not None:
- if condemned:
- clauses.append(ArchiveFile.scheduled_deletion_date != None)
- else:
- clauses.append(ArchiveFile.scheduled_deletion_date == None)
+
+ if live_at is not None and existed_at is not None:
+ raise IncompatibleArguments(
+ "You cannot specify both 'live_at' and 'existed_at'."
+ )
+ if live_at is not None:
+ clauses.extend(
+ [
+ Or(
+ # Rows predating the introduction of date_created
+ # will have it set to null.
+ ArchiveFile.date_created == None,
+ ArchiveFile.date_created <= live_at,
+ ),
+ Or(
+ ArchiveFile.date_superseded == None,
+ ArchiveFile.date_superseded > live_at,
+ ),
+ ]
+ )
+ elif existed_at is not None:
+ clauses.extend(
+ [
+ Or(
+ # Rows predating the introduction of date_created
+ # will have it set to null.
+ ArchiveFile.date_created == None,
+ ArchiveFile.date_created <= existed_at,
+ ),
+ Or(
+ ArchiveFile.date_removed == None,
+ ArchiveFile.date_removed > existed_at,
+ ),
+ ]
+ )
+
if only_published:
clauses.append(ArchiveFile.date_removed == None)
archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses)
diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py
index d366f80..ad2de6b 100644
--- a/lib/lp/soyuz/tests/test_archive.py
+++ b/lib/lp/soyuz/tests/test_archive.py
@@ -3373,6 +3373,65 @@ class TestGetPoolFileByPath(TestCaseWithFactory):
),
)
+ def test_source_live_at(self):
+ now = datetime.now(UTC)
+ archive = self.factory.makeArchive()
+ spph_1 = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.DELETED,
+ sourcepackagename="test-package",
+ component="main",
+ version="1",
+ )
+ removeSecurityProxy(spph_1).datepublished = now - timedelta(days=3)
+ removeSecurityProxy(spph_1).dateremoved = now - timedelta(days=1)
+ sprf_1 = self.factory.makeSourcePackageReleaseFile(
+ sourcepackagerelease=spph_1.sourcepackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_1.dsc", db_only=True
+ ),
+ )
+ spph_2 = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ sourcepackagename="test-package",
+ component="main",
+ version="2",
+ )
+ removeSecurityProxy(spph_2).datepublished = now - timedelta(days=2)
+ sprf_2 = self.factory.makeSourcePackageReleaseFile(
+ sourcepackagerelease=spph_2.sourcepackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_2.dsc", db_only=True
+ ),
+ )
+ IStore(archive).flush()
+ for days, expected_file in (
+ (4, None),
+ (3, sprf_1.libraryfile),
+ (2, sprf_1.libraryfile),
+ (1, None),
+ ):
+ self.assertEqual(
+ expected_file,
+ archive.getPoolFileByPath(
+ PurePath("pool/main/t/test-package/test-package_1.dsc"),
+ live_at=now - timedelta(days=days),
+ ),
+ )
+ for days, expected_file in (
+ (3, None),
+ (2, sprf_2.libraryfile),
+ (1, sprf_2.libraryfile),
+ ):
+ self.assertEqual(
+ expected_file,
+ archive.getPoolFileByPath(
+ PurePath("pool/main/t/test-package/test-package_2.dsc"),
+ live_at=now - timedelta(days=days),
+ ),
+ )
+
def test_binary_not_found(self):
archive = self.factory.makeArchive()
self.factory.makeBinaryPackagePublishingHistory(
@@ -3465,6 +3524,69 @@ class TestGetPoolFileByPath(TestCaseWithFactory):
),
)
+ def test_binary_live_at(self):
+ now = datetime.now(UTC)
+ archive = self.factory.makeArchive()
+ bpph_1 = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.DELETED,
+ sourcepackagename="test-package",
+ component="main",
+ version="1",
+ )
+ removeSecurityProxy(bpph_1).datepublished = now - timedelta(days=3)
+ removeSecurityProxy(bpph_1).dateremoved = now - timedelta(days=1)
+ bpf_1 = self.factory.makeBinaryPackageFile(
+ binarypackagerelease=bpph_1.binarypackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_1_amd64.deb", db_only=True
+ ),
+ )
+ bpph_2 = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ sourcepackagename="test-package",
+ component="main",
+ version="2",
+ )
+ removeSecurityProxy(bpph_2).datepublished = now - timedelta(days=2)
+ bpf_2 = self.factory.makeBinaryPackageFile(
+ binarypackagerelease=bpph_2.binarypackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_2_amd64.deb", db_only=True
+ ),
+ )
+ IStore(archive).flush()
+ for days, expected_file in (
+ (4, None),
+ (3, bpf_1.libraryfile),
+ (2, bpf_1.libraryfile),
+ (1, None),
+ ):
+ self.assertEqual(
+ expected_file,
+ archive.getPoolFileByPath(
+ PurePath(
+ "pool/main/t/test-package/test-package_1_amd64.deb"
+ ),
+ live_at=now - timedelta(days=days),
+ ),
+ )
+ for days, expected_file in (
+ (3, None),
+ (2, bpf_2.libraryfile),
+ (1, bpf_2.libraryfile),
+ ):
+ self.assertEqual(
+ expected_file,
+ archive.getPoolFileByPath(
+ PurePath(
+ "pool/main/t/test-package/test-package_2_amd64.deb"
+ ),
+ live_at=now - timedelta(days=days),
+ ),
+ )
+
class TestGetPublishedSources(TestCaseWithFactory):
diff --git a/lib/lp/soyuz/tests/test_archivefile.py b/lib/lp/soyuz/tests/test_archivefile.py
index c4e2a57..9ee7a88 100644
--- a/lib/lp/soyuz/tests/test_archivefile.py
+++ b/lib/lp/soyuz/tests/test_archivefile.py
@@ -4,14 +4,17 @@
"""ArchiveFile tests."""
import os
-from datetime import timedelta
+from datetime import datetime, timedelta
+import pytz
import transaction
from storm.store import Store
from testtools.matchers import AfterPreprocessing, Equals, Is, MatchesStructure
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
+from lp.app.errors import IncompatibleArguments
+from lp.services.database.constants import UTC_NOW
from lp.services.database.sqlbase import (
flush_database_caches,
get_transaction_timestamp,
@@ -84,13 +87,8 @@ class TestArchiveFile(TestCaseWithFactory):
def test_getByArchive(self):
archives = [self.factory.makeArchive(), self.factory.makeArchive()]
archive_files = []
- now = get_transaction_timestamp(Store.of(archives[0]))
for archive in archives:
- archive_files.append(
- self.factory.makeArchiveFile(
- archive=archive, scheduled_deletion_date=now
- )
- )
+ archive_files.append(self.factory.makeArchiveFile(archive=archive))
archive_files.append(
self.factory.makeArchiveFile(archive=archive, container="foo")
)
@@ -115,14 +113,6 @@ class TestArchiveFile(TestCaseWithFactory):
[], archive_file_set.getByArchive(archives[0], path="other")
)
self.assertContentEqual(
- [archive_files[0]],
- archive_file_set.getByArchive(archives[0], condemned=True),
- )
- self.assertContentEqual(
- [archive_files[1]],
- archive_file_set.getByArchive(archives[0], condemned=False),
- )
- self.assertContentEqual(
archive_files[2:], archive_file_set.getByArchive(archives[1])
)
self.assertContentEqual(
@@ -142,14 +132,6 @@ class TestArchiveFile(TestCaseWithFactory):
[], archive_file_set.getByArchive(archives[1], path="other")
)
self.assertContentEqual(
- [archive_files[2]],
- archive_file_set.getByArchive(archives[1], condemned=True),
- )
- self.assertContentEqual(
- [archive_files[3]],
- archive_file_set.getByArchive(archives[1], condemned=False),
- )
- self.assertContentEqual(
[archive_files[0]],
archive_file_set.getByArchive(
archives[0],
@@ -186,6 +168,126 @@ class TestArchiveFile(TestCaseWithFactory):
archive_file_set.getByArchive(archive, path_parent="dists/xenial"),
)
+ def test_getByArchive_both_live_at_and_existed_at(self):
+ now = datetime.now(pytz.UTC)
+ archive = self.factory.makeArchive()
+ self.assertRaisesWithContent(
+ IncompatibleArguments,
+ "You cannot specify both 'live_at' and 'existed_at'.",
+ getUtility(IArchiveFileSet).getByArchive,
+ archive,
+ live_at=now,
+ existed_at=now,
+ )
+
+ def test_getByArchive_live_at(self):
+ archive = self.factory.makeArchive()
+ now = get_transaction_timestamp(Store.of(archive))
+ archive_file_1 = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file_1 = removeSecurityProxy(archive_file_1)
+ naked_archive_file_1.date_created = now - timedelta(days=3)
+ naked_archive_file_1.date_superseded = now - timedelta(days=1)
+ archive_file_2 = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file_2 = removeSecurityProxy(archive_file_2)
+ naked_archive_file_2.date_created = now - timedelta(days=1)
+ archive_file_set = getUtility(IArchiveFileSet)
+ for days, expected_file in (
+ (4, None),
+ (3, archive_file_1),
+ (2, archive_file_1),
+ (1, archive_file_2),
+ (0, archive_file_2),
+ ):
+ self.assertEqual(
+ expected_file,
+ archive_file_set.getByArchive(
+ archive,
+ path="dists/jammy/InRelease",
+ live_at=now - timedelta(days=days) if days else UTC_NOW,
+ ).one(),
+ )
+
+ def test_getByArchive_live_at_without_date_created(self):
+ archive = self.factory.makeArchive()
+ now = get_transaction_timestamp(Store.of(archive))
+ archive_file = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file = removeSecurityProxy(archive_file)
+ naked_archive_file.date_created = None
+ naked_archive_file.date_superseded = now
+ archive_file_set = getUtility(IArchiveFileSet)
+ for days, expected_file in ((1, archive_file), (0, None)):
+ self.assertEqual(
+ expected_file,
+ archive_file_set.getByArchive(
+ archive,
+ path="dists/jammy/InRelease",
+ live_at=now - timedelta(days=days) if days else UTC_NOW,
+ ).one(),
+ )
+
+ def test_getByArchive_existed_at(self):
+ archive = self.factory.makeArchive()
+ now = get_transaction_timestamp(Store.of(archive))
+ archive_file_1 = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file_1 = removeSecurityProxy(archive_file_1)
+ naked_archive_file_1.date_created = now - timedelta(days=3)
+ naked_archive_file_1.date_superseded = now - timedelta(days=2)
+ naked_archive_file_1.date_removed = now - timedelta(days=1)
+ archive_file_2 = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file_2 = removeSecurityProxy(archive_file_2)
+ naked_archive_file_2.date_created = now - timedelta(days=2)
+ archive_file_set = getUtility(IArchiveFileSet)
+ for days, existed in ((4, False), (3, True), (2, True), (1, False)):
+ self.assertEqual(
+ archive_file_1 if existed else None,
+ archive_file_set.getByArchive(
+ archive,
+ path="dists/jammy/InRelease",
+ sha256=archive_file_1.library_file.content.sha256,
+ existed_at=now - timedelta(days=days),
+ ).one(),
+ )
+ for days, existed in ((3, False), (2, True), (1, True), (0, True)):
+ self.assertEqual(
+ archive_file_2 if existed else None,
+ archive_file_set.getByArchive(
+ archive,
+ path="dists/jammy/InRelease",
+ sha256=archive_file_2.library_file.content.sha256,
+ existed_at=now - timedelta(days=days) if days else UTC_NOW,
+ ).one(),
+ )
+
+ def test_getByArchive_existed_at_without_date_created(self):
+ archive = self.factory.makeArchive()
+ now = get_transaction_timestamp(Store.of(archive))
+ archive_file = self.factory.makeArchiveFile(
+ archive=archive, path="dists/jammy/InRelease"
+ )
+ naked_archive_file = removeSecurityProxy(archive_file)
+ naked_archive_file.date_created = None
+ naked_archive_file.date_removed = now
+ archive_file_set = getUtility(IArchiveFileSet)
+ for days, expected_file in ((1, archive_file), (0, None)):
+ self.assertEqual(
+ expected_file,
+ archive_file_set.getByArchive(
+ archive,
+ path="dists/jammy/InRelease",
+ existed_at=now - timedelta(days=days) if days else UTC_NOW,
+ ).one(),
+ )
+
def test_scheduleDeletion(self):
archive_files = [self.factory.makeArchiveFile() for _ in range(3)]
getUtility(IArchiveFileSet).scheduleDeletion(
diff --git a/lib/lp/soyuz/xmlrpc/archive.py b/lib/lp/soyuz/xmlrpc/archive.py
index d5e5f69..61546b3 100644
--- a/lib/lp/soyuz/xmlrpc/archive.py
+++ b/lib/lp/soyuz/xmlrpc/archive.py
@@ -8,6 +8,7 @@ __all__ = [
]
import logging
+from datetime import datetime
from pathlib import PurePath
from typing import Optional, Union
from xmlrpc.client import Fault
@@ -18,6 +19,7 @@ from zope.interface import implementer
from zope.interface.interfaces import ComponentLookupError
from zope.security.proxy import removeSecurityProxy
+from lp.services.database.constants import UTC_NOW
from lp.services.macaroons.interfaces import NO_USER, IMacaroonIssuer
from lp.services.webapp import LaunchpadXMLRPCView
from lp.soyuz.enums import ArchiveRepositoryFormat
@@ -126,7 +128,11 @@ class ArchiveAPI(LaunchpadXMLRPCView):
)
def _translatePathByHash(
- self, archive_reference: str, archive, path: PurePath
+ self,
+ archive_reference: str,
+ archive,
+ path: PurePath,
+ existed_at: Optional[datetime],
) -> Optional[str]:
suite = path.parts[1]
checksum_type = path.parts[-2]
@@ -143,6 +149,7 @@ class ArchiveAPI(LaunchpadXMLRPCView):
container="release:%s" % suite,
path_parent="/".join(path.parts[:-3]),
sha256=checksum,
+ existed_at=UTC_NOW if existed_at is None else existed_at,
)
.any()
)
@@ -150,20 +157,27 @@ class ArchiveAPI(LaunchpadXMLRPCView):
return None
log.info(
- "%s: %s (by-hash) -> LFA %d",
+ "%s: %s (by-hash)%s -> LFA %d",
archive_reference,
path.as_posix(),
+ "" if existed_at is None else " at %s" % existed_at.isoformat(),
archive_file.library_file.id,
)
return archive_file.library_file.getURL(include_token=True)
def _translatePathNonPool(
- self, archive_reference: str, archive, path: PurePath
+ self,
+ archive_reference: str,
+ archive,
+ path: PurePath,
+ live_at: Optional[datetime],
) -> Optional[str]:
archive_file = (
getUtility(IArchiveFileSet)
.getByArchive(
- archive=archive, path=path.as_posix(), condemned=False
+ archive=archive,
+ path=path.as_posix(),
+ live_at=UTC_NOW if live_at is None else live_at,
)
.one()
)
@@ -171,30 +185,41 @@ class ArchiveAPI(LaunchpadXMLRPCView):
return None
log.info(
- "%s: %s (non-pool) -> LFA %d",
+ "%s: %s (non-pool)%s -> LFA %d",
archive_reference,
path.as_posix(),
+ "" if live_at is None else " at %s" % live_at.isoformat(),
archive_file.library_file.id,
)
return archive_file.library_file.getURL(include_token=True)
def _translatePathPool(
- self, archive_reference: str, archive, path: PurePath
+ self,
+ archive_reference: str,
+ archive,
+ path: PurePath,
+ live_at: Optional[datetime],
) -> Optional[str]:
- lfa = archive.getPoolFileByPath(path)
+ lfa = archive.getPoolFileByPath(path, live_at=live_at)
if lfa is None:
return None
log.info(
- "%s: %s (pool) -> LFA %d",
+ "%s: %s (pool)%s -> LFA %d",
archive_reference,
path.as_posix(),
+ "" if live_at is None else " at %s" % live_at.isoformat(),
lfa.id,
)
return lfa.getURL(include_token=True)
@return_fault
- def _translatePath(self, archive_reference: str, path: PurePath) -> str:
+ def _translatePath(
+ self,
+ archive_reference: str,
+ path: PurePath,
+ live_at: Optional[datetime],
+ ) -> str:
archive = getUtility(IArchiveSet).getByReference(archive_reference)
if archive is None:
log.info("%s: No archive found", archive_reference)
@@ -212,38 +237,60 @@ class ArchiveAPI(LaunchpadXMLRPCView):
message="Can't translate paths in '%s' with format %s."
% (archive_reference, archive.repository_format)
)
+ live_at_message = (
+ "" if live_at is None else " at %s" % live_at.isoformat()
+ )
# Consider by-hash index files.
if path.parts[0] == "dists" and path.parts[2:][-3:-2] == ("by-hash",):
- url = self._translatePathByHash(archive_reference, archive, path)
+ url = self._translatePathByHash(
+ archive_reference, archive, path, live_at
+ )
if url is not None:
return url
# Consider other non-pool files.
if path.parts[0] != "pool":
- url = self._translatePathNonPool(archive_reference, archive, path)
+ url = self._translatePathNonPool(
+ archive_reference, archive, path, live_at
+ )
if url is not None:
return url
- log.info("%s: %s not found", archive_reference, path.as_posix())
+ log.info(
+ "%s: %s not found%s",
+ archive_reference,
+ path.as_posix(),
+ live_at_message,
+ )
raise faults.NotFound(
- message="'%s' not found in '%s'."
- % (path.as_posix(), archive_reference)
+ message="'%s' not found in '%s'%s."
+ % (path.as_posix(), archive_reference, live_at_message)
)
# Consider pool files.
- url = self._translatePathPool(archive_reference, archive, path)
+ url = self._translatePathPool(
+ archive_reference, archive, path, live_at
+ )
if url is not None:
return url
- log.info("%s: %s not found", archive_reference, path.as_posix())
+ log.info(
+ "%s: %s not found%s",
+ archive_reference,
+ path.as_posix(),
+ live_at_message,
+ )
raise faults.NotFound(
- message="'%s' not found in '%s'."
- % (path.as_posix(), archive_reference)
+ message="'%s' not found in '%s'%s."
+ % (path.as_posix(), archive_reference, live_at_message)
)
def translatePath(
- self, archive_reference: str, path: str
+ self,
+ archive_reference: str,
+ path: str,
+ live_at: Optional[datetime] = None,
) -> Union[str, Fault]:
"""See `IArchiveAPI`."""
# This thunk exists because you can't use a decorated function as
# the implementation of a method exported over XML-RPC.
- return self._translatePath(archive_reference, PurePath(path))
+ return self._translatePath(archive_reference, PurePath(path), live_at)
diff --git a/lib/lp/soyuz/xmlrpc/tests/test_archive.py b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
index 3d725ce..e549279 100644
--- a/lib/lp/soyuz/xmlrpc/tests/test_archive.py
+++ b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
@@ -3,8 +3,9 @@
"""Tests for the internal Soyuz archive API."""
-from datetime import timedelta
+from datetime import datetime, timedelta
+import pytz
from fixtures import FakeLogger
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
@@ -372,6 +373,53 @@ class TestArchiveAPI(TestCaseWithFactory):
% (archive.reference, path, archive_file.library_file.id)
)
+ def test_translatePath_by_hash_live_at(self):
+ now = datetime.now(pytz.UTC)
+ archive = removeSecurityProxy(self.factory.makeArchive(private=True))
+ archive_file = self.factory.makeArchiveFile(
+ archive=archive,
+ container="release:jammy",
+ path="dists/jammy/InRelease",
+ )
+ naked_archive_file = removeSecurityProxy(archive_file)
+ naked_archive_file.date_created = now - timedelta(days=3)
+ naked_archive_file.date_superseded = now - timedelta(days=2)
+ naked_archive_file.date_removed = now - timedelta(days=1)
+ path = (
+ "dists/jammy/by-hash/SHA256/%s"
+ % archive_file.library_file.content.sha256
+ )
+ for days, expected in ((4, False), (3, True), (2, True), (1, False)):
+ self.logger = self.useFixture(FakeLogger())
+ live_at = now - timedelta(days=days)
+ if expected:
+ self.assertEqual(
+ archive_file.library_file.getURL(),
+ self.archive_api.translatePath(
+ archive.reference, path, live_at=live_at
+ ),
+ )
+ self.assertLogs(
+ "%s: %s (by-hash) at %s -> LFA %d"
+ % (
+ archive.reference,
+ path,
+ live_at.isoformat(),
+ archive_file.library_file.id,
+ )
+ )
+ else:
+ self.assertNotFound(
+ "translatePath",
+ "'%s' not found in '%s' at %s."
+ % (path, archive.reference, live_at.isoformat()),
+ "%s: %s not found at %s"
+ % (archive.reference, path, live_at.isoformat()),
+ archive.reference,
+ path,
+ live_at=live_at,
+ )
+
def test_translatePath_non_pool_not_found(self):
archive = removeSecurityProxy(self.factory.makeArchive())
self.factory.makeArchiveFile(archive=archive)
@@ -522,6 +570,56 @@ class TestArchiveAPI(TestCaseWithFactory):
% (archive.reference, path, sprf.libraryfile.id)
)
+ def test_translatePath_pool_source_live_at(self):
+ now = datetime.now(pytz.UTC)
+ archive = removeSecurityProxy(self.factory.makeArchive())
+ spph = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ sourcepackagename="test-package",
+ component="main",
+ )
+ removeSecurityProxy(spph).datepublished = now - timedelta(days=2)
+ removeSecurityProxy(spph).dateremoved = now - timedelta(days=1)
+ sprf = self.factory.makeSourcePackageReleaseFile(
+ sourcepackagerelease=spph.sourcepackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_1.dsc", db_only=True
+ ),
+ )
+ IStore(sprf).flush()
+ path = "pool/main/t/test-package/test-package_1.dsc"
+ for days, expected in ((3, False), (2, True), (1, False)):
+ self.logger = self.useFixture(FakeLogger())
+ live_at = now - timedelta(days=days)
+ if expected:
+ self.assertEqual(
+ sprf.libraryfile.getURL(),
+ self.archive_api.translatePath(
+ archive.reference, path, live_at=live_at
+ ),
+ )
+ self.assertLogs(
+ "%s: %s (pool) at %s -> LFA %d"
+ % (
+ archive.reference,
+ path,
+ live_at.isoformat(),
+ sprf.libraryfile.id,
+ )
+ )
+ else:
+ self.assertNotFound(
+ "translatePath",
+ "'%s' not found in '%s' at %s."
+ % (path, archive.reference, live_at.isoformat()),
+ "%s: %s not found at %s"
+ % (archive.reference, path, live_at.isoformat()),
+ archive.reference,
+ path,
+ live_at=live_at,
+ )
+
def test_translatePath_pool_binary_not_found(self):
archive = removeSecurityProxy(self.factory.makeArchive())
self.factory.makeBinaryPackagePublishingHistory(
@@ -613,3 +711,53 @@ class TestArchiveAPI(TestCaseWithFactory):
"%s: %s (pool) -> LFA %d"
% (archive.reference, path, bpf.libraryfile.id)
)
+
+ def test_translatePath_pool_binary_live_at(self):
+ now = datetime.now(pytz.UTC)
+ archive = removeSecurityProxy(self.factory.makeArchive())
+ bpph = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ sourcepackagename="test-package",
+ component="main",
+ )
+ removeSecurityProxy(bpph).datepublished = now - timedelta(days=2)
+ removeSecurityProxy(bpph).dateremoved = now - timedelta(days=1)
+ bpf = self.factory.makeBinaryPackageFile(
+ binarypackagerelease=bpph.binarypackagerelease,
+ library_file=self.factory.makeLibraryFileAlias(
+ filename="test-package_1_amd64.deb", db_only=True
+ ),
+ )
+ IStore(bpf).flush()
+ path = "pool/main/t/test-package/test-package_1_amd64.deb"
+ for days, expected in ((3, False), (2, True), (1, False)):
+ self.logger = self.useFixture(FakeLogger())
+ live_at = now - timedelta(days=days)
+ if expected:
+ self.assertEqual(
+ bpf.libraryfile.getURL(),
+ self.archive_api.translatePath(
+ archive.reference, path, live_at=live_at
+ ),
+ )
+ self.assertLogs(
+ "%s: %s (pool) at %s -> LFA %d"
+ % (
+ archive.reference,
+ path,
+ live_at.isoformat(),
+ bpf.libraryfile.id,
+ )
+ )
+ else:
+ self.assertNotFound(
+ "translatePath",
+ "'%s' not found in '%s' at %s."
+ % (path, archive.reference, live_at.isoformat()),
+ "%s: %s not found at %s"
+ % (archive.reference, path, live_at.isoformat()),
+ archive.reference,
+ path,
+ live_at=live_at,
+ )
Follow ups