launchpad-reviewers team mailing list archive
Message #04748
[Merge] lp:~wgrant/launchpad/dspc-out-of-distroseries into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/dspc-out-of-distroseries into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~wgrant/launchpad/dspc-out-of-distroseries/+merge/72674
DistroSeries, which is already an intimidatingly large class, carries a couple of hundred lines of DistroSeriesPackageCache maintenance methods. They're only used by update-pkgcache.py and tests, so they're largely clutter and would be easier to manage elsewhere.
This branch moves them onto the DistroSeriesPackageCache class itself, shrinking DistroSeries by nearly 10%.
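For reviewers skimming the diff, the practical change at call sites is that the old DistroSeries instance methods become classmethods on DistroSeriesPackageCache that take the series as their first argument. A minimal before/after sketch, adapted from the update-pkgcache.py hunk below (series, archive, logger and txn here are placeholders for objects the script already has, not new names introduced by the branch):

    from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache

    # Before: maintenance methods lived on DistroSeries.
    # series.removeOldCacheItems(archive=archive, log=logger)
    # series.updateCompletePackageCache(archive=archive, ztm=txn, log=logger)

    # After: the same operations are classmethods on the cache model,
    # with the distroseries passed in explicitly.
    DistroSeriesPackageCache.removeOld(series, archive=archive, log=logger)
    updates = DistroSeriesPackageCache.updateAll(
        series, archive=archive, ztm=txn, log=logger)

The method bodies are otherwise carried over largely unchanged, as the diff below shows.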
--
https://code.launchpad.net/~wgrant/launchpad/dspc-out-of-distroseries/+merge/72674
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/dspc-out-of-distroseries into lp:launchpad.
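For context, the full per-archive cache refresh exercised in the package-cache.txt doctest now reads roughly as follows (a sketch only; ubuntu, archive, transaction and logger are the objects that doctest already sets up, and the source-package side is untouched by this branch):

    from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache

    # Source package caches are still maintained via the distribution.
    ubuntu.updateCompleteSourcePackageCache(
        archive=archive, ztm=transaction, log=logger)
    # Binary package caches are now maintained via the model class,
    # one distroseries at a time.
    for series in ubuntu.series:
        DistroSeriesPackageCache.removeOld(series, archive=archive, log=logger)
        DistroSeriesPackageCache.updateAll(
            series, archive=archive, ztm=transaction, log=logger)
    archive.updateArchiveCache()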
=== modified file 'cronscripts/update-pkgcache.py'
--- cronscripts/update-pkgcache.py 2010-11-08 12:52:43 +0000
+++ cronscripts/update-pkgcache.py 2011-08-24 06:27:35 +0000
@@ -15,6 +15,7 @@
from lp.registry.interfaces.distribution import IDistributionSet
from lp.services.scripts.base import LaunchpadCronScript
+from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
class PackageCacheUpdater(LaunchpadCronScript):
@@ -63,10 +64,11 @@
distroseries.distribution.name, distroseries.name,
archive.displayname))
- distroseries.removeOldCacheItems(archive=archive, log=self.logger)
+ DistroSeriesPackageCache.removeOld(
+ distroseries, archive=archive, log=self.logger)
- updates = distroseries.updateCompletePackageCache(
- archive=archive, ztm=self.txn, log=self.logger)
+ updates = DistroSeriesPackageCache.updateAll(
+ distroseries, archive=archive, ztm=self.txn, log=self.logger)
if updates > 0:
self.txn.commit()
@@ -102,4 +104,3 @@
script = PackageCacheUpdater(
'update-cache', dbuser="update-pkg-cache")
script.lock_and_run()
-
=== modified file 'lib/lp/registry/interfaces/distroseries.py'
--- lib/lp/registry/interfaces/distroseries.py 2011-08-23 18:45:37 +0000
+++ lib/lp/registry/interfaces/distroseries.py 2011-08-24 06:27:35 +0000
@@ -739,49 +739,6 @@
distribution 'main_archive'.
"""
- def getBinaryPackageCaches(archive=None):
- """All of the cached binary package records for this distroseries.
-
- If 'archive' is not given it will return all caches stored for the
- distroseries main archives (PRIMARY and PARTNER).
- """
-
- def removeOldCacheItems(archive, log):
- """Delete any records that are no longer applicable.
-
- Consider all binarypackages marked as REMOVED.
-
- Also purges all existing cache records for disabled archives.
-
- :param archive: target `IArchive`.
- :param log: the context logger object able to print DEBUG level
- messages.
- """
-
- def updateCompletePackageCache(archive, log, ztm, commit_chunk=500):
- """Update the binary package cache
-
- Consider all binary package names published in this distro series
- and entirely skips updates for disabled archives
-
- :param archive: target `IArchive`;
- :param log: logger object for printing debug level information;
- :param ztm: transaction used for partial commits, every chunk of
- 'commit_chunk' updates is committed;
- :param commit_chunk: number of updates before commit, defaults to 500.
-
- :return the number of packages updated.
- """
-
- def updatePackageCache(binarypackagename, archive, log):
- """Update the package cache for a given IBinaryPackageName
-
- 'log' is required, it should be a logger object able to print
- DEBUG level messages.
- 'ztm' is the current trasaction manager used for partial commits
- (in full batches of 100 elements)
- """
-
def searchPackages(text):
"""Search through the packge cache for this distroseries and return
DistroSeriesBinaryPackage objects that match the given text.
=== modified file 'lib/lp/registry/model/distroseries.py'
--- lib/lp/registry/model/distroseries.py 2011-08-22 11:39:17 +0000
+++ lib/lp/registry/model/distroseries.py 2011-08-24 06:27:35 +0000
@@ -157,7 +157,6 @@
ISourcePackageFormatSelectionSet,
)
from lp.soyuz.model.binarypackagename import BinaryPackageName
-from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
from lp.soyuz.model.component import Component
from lp.soyuz.model.distroarchseries import (
DistroArchSeries,
@@ -1341,139 +1340,6 @@
return section
raise NotFoundError(name)
- def getBinaryPackageCaches(self, archive=None):
- """See `IDistroSeries`."""
- if archive is not None:
- archives = [archive.id]
- else:
- archives = self.distribution.all_distro_archive_ids
-
- caches = DistroSeriesPackageCache.select("""
- distroseries = %s AND
- archive IN %s
- """ % sqlvalues(self, archives),
- orderBy="name")
-
- return caches
-
- def removeOldCacheItems(self, archive, log):
- """See `IDistroSeries`."""
-
- # get the set of package names that should be there
- bpns = set(BinaryPackageName.select("""
- BinaryPackagePublishingHistory.distroarchseries =
- DistroArchSeries.id AND
- DistroArchSeries.distroseries = %s AND
- Archive.id = %s AND
- BinaryPackagePublishingHistory.archive = Archive.id AND
- BinaryPackagePublishingHistory.binarypackagerelease =
- BinaryPackageRelease.id AND
- BinaryPackageRelease.binarypackagename =
- BinaryPackageName.id AND
- BinaryPackagePublishingHistory.dateremoved is NULL AND
- Archive.enabled = TRUE
- """ % sqlvalues(self, archive),
- distinct=True,
- clauseTables=[
- 'Archive',
- 'DistroArchSeries',
- 'BinaryPackagePublishingHistory',
- 'BinaryPackageRelease']))
-
- # remove the cache entries for binary packages we no longer want
- for cache in self.getBinaryPackageCaches(archive):
- if cache.binarypackagename not in bpns:
- log.debug(
- "Removing binary cache for '%s' (%s)"
- % (cache.name, cache.id))
- cache.destroySelf()
-
- def updateCompletePackageCache(self, archive, log, ztm, commit_chunk=500):
- """See `IDistroSeries`."""
- # Do not create cache entries for disabled archives.
- if not archive.enabled:
- return
-
- # Get the set of package names to deal with.
- bpns = IStore(BinaryPackageName).find(
- BinaryPackageName,
- DistroArchSeries.distroseries == self,
- BinaryPackagePublishingHistory.distroarchseriesID ==
- DistroArchSeries.id,
- BinaryPackagePublishingHistory.archive == archive,
- BinaryPackagePublishingHistory.binarypackagereleaseID ==
- BinaryPackageRelease.id,
- BinaryPackageRelease.binarypackagename == BinaryPackageName.id,
- BinaryPackagePublishingHistory.dateremoved == None).config(
- distinct=True).order_by(BinaryPackageName.name)
-
- number_of_updates = 0
- chunk_size = 0
- for bpn in bpns:
- log.debug("Considering binary '%s'" % bpn.name)
- self.updatePackageCache(bpn, archive, log)
- number_of_updates += 1
- chunk_size += 1
- if chunk_size == commit_chunk:
- chunk_size = 0
- log.debug("Committing")
- ztm.commit()
-
- return number_of_updates
-
- def updatePackageCache(self, binarypackagename, archive, log):
- """See `IDistroSeries`."""
-
- # get the set of published binarypackagereleases
- bprs = IStore(BinaryPackageRelease).find(
- BinaryPackageRelease,
- BinaryPackageRelease.binarypackagename == binarypackagename,
- BinaryPackageRelease.id ==
- BinaryPackagePublishingHistory.binarypackagereleaseID,
- BinaryPackagePublishingHistory.distroarchseriesID ==
- DistroArchSeries.id,
- DistroArchSeries.distroseries == self,
- BinaryPackagePublishingHistory.archive == archive,
- BinaryPackagePublishingHistory.dateremoved == None)
- bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated))
- bprs = bprs.config(distinct=True)
-
- if bprs.count() == 0:
- log.debug("No binary releases found.")
- return
-
- # find or create the cache entry
- cache = DistroSeriesPackageCache.selectOne("""
- distroseries = %s AND
- archive = %s AND
- binarypackagename = %s
- """ % sqlvalues(self, archive, binarypackagename))
- if cache is None:
- log.debug("Creating new binary cache entry.")
- cache = DistroSeriesPackageCache(
- archive=archive,
- distroseries=self,
- binarypackagename=binarypackagename)
-
- # make sure the cached name, summary and description are correct
- cache.name = binarypackagename.name
- cache.summary = bprs[0].summary
- cache.description = bprs[0].description
-
- # get the sets of binary package summaries, descriptions. there is
- # likely only one, but just in case...
-
- summaries = set()
- descriptions = set()
- for bpr in bprs:
- log.debug("Considering binary version %s" % bpr.version)
- summaries.add(bpr.summary)
- descriptions.add(bpr.description)
-
- # and update the caches
- cache.summaries = ' '.join(sorted(summaries))
- cache.descriptions = ' '.join(sorted(descriptions))
-
def searchPackages(self, text):
"""See `IDistroSeries`."""
=== modified file 'lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt'
--- lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt 2011-03-23 16:28:51 +0000
+++ lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt 2011-08-24 06:27:35 +0000
@@ -209,8 +209,10 @@
DEBUG Considering source 'pmount'
...
- >>> warty.updateCompletePackageCache(
- ... archive=cprov.archive, ztm=LaunchpadZopelessLayer.txn,
+ >>> from lp.soyuz.model.distroseriespackagecache import (
+ ... DistroSeriesPackageCache)
+ >>> DistroSeriesPackageCache.updateAll(
+ ... warty, archive=cprov.archive, ztm=LaunchpadZopelessLayer.txn,
... log=FakeLogger())
DEBUG Considering binary 'mozilla-firefox'
...
=== modified file 'lib/lp/soyuz/doc/package-cache.txt'
--- lib/lp/soyuz/doc/package-cache.txt 2010-12-22 20:46:21 +0000
+++ lib/lp/soyuz/doc/package-cache.txt 2011-08-24 06:27:35 +0000
@@ -58,8 +58,10 @@
* fti, a tsvector generated on insert or update with the text indexes
for name, summary, description, summaries and descriptions.
+ >>> from lp.soyuz.model.distroseriespackagecache import (
+ ... DistroSeriesPackageCache)
>>> warty = ubuntu['warty']
- >>> warty_caches = warty.getBinaryPackageCaches()
+ >>> warty_caches = DistroSeriesPackageCache._find(warty)
>>> warty_caches.count()
5
>>> for name in sorted([cache.name for cache in warty_caches]):
@@ -224,7 +226,8 @@
Binary cache updates are driven by distroseries, IDistroSeries
instance offers a method for removing obsolete records in cache:
- >>> warty.removeOldCacheItems(archive=ubuntu.main_archive, log=FakeLogger())
+ >>> DistroSeriesPackageCache.removeOld(
+ ... warty, archive=ubuntu.main_archive, log=FakeLogger())
DEBUG Removing binary cache for 'foobar' (8)
>>> transaction.commit()
@@ -257,8 +260,9 @@
We can invoke the cache updater directly on IDistroSeries:
- >>> updates = warty.updateCompletePackageCache(
- ... archive=ubuntu.main_archive, ztm=transaction, log=FakeLogger())
+ >>> updates = DistroSeriesPackageCache.updateAll(
+ ... warty, archive=ubuntu.main_archive, ztm=transaction,
+ ... log=FakeLogger())
DEBUG Considering binary 'at'
...
DEBUG Considering binary 'cdrkit'
@@ -359,8 +363,9 @@
DEBUG Considering source 'pmount'
...
- >>> binary_updates = warty.updateCompletePackageCache(
- ... archive=cprov.archive, ztm=transaction, log=FakeLogger())
+ >>> binary_updates = DistroSeriesPackageCache.updateAll(
+ ... warty, archive=cprov.archive, ztm=transaction,
+ ... log=FakeLogger())
DEBUG Considering binary 'mozilla-firefox'
...
@@ -703,9 +708,10 @@
... ubuntu.updateCompleteSourcePackageCache(
... archive=archive, ztm=transaction, log=logger)
... for series in ubuntu.series:
- ... series.removeOldCacheItems(archive=archive, log=logger)
- ... series.updateCompletePackageCache(
- ... archive=archive, ztm=transaction, log=logger)
+ ... DistroSeriesPackageCache.removeOld(
+ ... series, archive=archive, log=logger)
+ ... DistroSeriesPackageCache.updateAll(
+ ... series, archive=archive, ztm=transaction, log=logger)
... archive.updateArchiveCache()
>>> def print_caches(archive):
... source_caches = DistributionSourcePackageCache.selectBy(
=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py 2011-08-02 13:42:46 +0000
+++ lib/lp/soyuz/model/archive.py 2011-08-24 06:27:35 +0000
@@ -189,7 +189,6 @@
from lp.soyuz.model.distributionsourcepackagecache import (
DistributionSourcePackageCache,
)
-from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.model.files import (
BinaryPackageFile,
SourcePackageReleaseFile,
@@ -910,6 +909,8 @@
def updateArchiveCache(self):
"""See `IArchive`."""
+ from lp.soyuz.model.distroseriespackagecache import (
+ DistroSeriesPackageCache)
# Compiled regexp to remove puntication.
clean_text = re.compile('(,|;|:|\.|\?|!)')
@@ -1541,7 +1542,7 @@
"""Prevent copyPackage(s) if these conditions are not met."""
if not getFeatureFlag(u"soyuz.copypackage.enabled"):
raise ForbiddenByFeatureFlag
- if (self.is_ppa and
+ if (self.is_ppa and
not getFeatureFlag(u"soyuz.copypackageppa.enabled")):
# We have no way of giving feedback about failed jobs yet,
# so this is disabled for now.
@@ -1959,7 +1960,7 @@
commercial = getUtility(ILaunchpadCelebrities).commercial_admin
admin = getUtility(ILaunchpadCelebrities).admin
if not person.inTeam(commercial) and not person.inTeam(admin):
- return '%s is not allowed to make private PPAs' % (person.name,)
+ return '%s is not allowed to make private PPAs' % person.name
if person.isTeam() and (
person.subscriptionpolicy in OPEN_TEAM_POLICY):
return "Open teams cannot have PPAs."
=== modified file 'lib/lp/soyuz/model/distributionsourcepackagerelease.py'
--- lib/lp/soyuz/model/distributionsourcepackagerelease.py 2011-05-19 14:15:19 +0000
+++ lib/lp/soyuz/model/distributionsourcepackagerelease.py 2011-08-24 06:27:35 +0000
@@ -43,7 +43,6 @@
from lp.soyuz.model.binarypackagename import BinaryPackageName
from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
from lp.soyuz.model.distroseriesbinarypackage import DistroSeriesBinaryPackage
-from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.model.publishing import (
BinaryPackagePublishingHistory,
SourcePackagePublishingHistory,
@@ -165,6 +164,8 @@
#avoid circular imports.
from lp.registry.model.distroseries import DistroSeries
from lp.soyuz.model.distroarchseries import DistroArchSeries
+ from lp.soyuz.model.distroseriespackagecache import (
+ DistroSeriesPackageCache)
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
archive_ids = list(self.distribution.all_distro_archive_ids)
result_row = (
=== modified file 'lib/lp/soyuz/model/distroarchseriesbinarypackage.py'
--- lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2010-11-09 08:43:34 +0000
+++ lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2011-08-24 06:27:35 +0000
@@ -26,7 +26,6 @@
from lp.soyuz.model.distroarchseriesbinarypackagerelease import (
DistroArchSeriesBinaryPackageRelease,
)
-from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
@@ -76,6 +75,8 @@
@cachedproperty
def cache(self):
"""See IDistroArchSeriesBinaryPackage."""
+ from lp.soyuz.model.distroseriespackagecache import (
+ DistroSeriesPackageCache)
query = """
distroseries = %s AND
archive IN %s AND
=== modified file 'lib/lp/soyuz/model/distroseriesbinarypackage.py'
--- lib/lp/soyuz/model/distroseriesbinarypackage.py 2011-05-18 14:32:04 +0000
+++ lib/lp/soyuz/model/distroseriesbinarypackage.py 2011-08-24 06:27:35 +0000
@@ -21,7 +21,6 @@
IDistroSeriesBinaryPackage,
)
from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
-from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.model.distroseriessourcepackagerelease import (
DistroSeriesSourcePackageRelease,
)
@@ -66,6 +65,8 @@
@cachedproperty
def cache(self):
"""See IDistroSeriesBinaryPackage."""
+ from lp.soyuz.model.distroseriespackagecache import (
+ DistroSeriesPackageCache)
store = Store.of(self.distroseries)
archive_ids = (
self.distroseries.distribution.all_distro_archive_ids)
=== modified file 'lib/lp/soyuz/model/distroseriespackagecache.py'
--- lib/lp/soyuz/model/distroseriespackagecache.py 2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/model/distroseriespackagecache.py 2011-08-24 06:27:35 +0000
@@ -12,13 +12,24 @@
ForeignKey,
StringCol,
)
-from storm.locals import RawStr
+from storm.locals import (
+ Desc,
+ RawStr,
+ )
from zope.interface import implements
-from canonical.database.sqlbase import SQLBase
+from canonical.database.sqlbase import (
+ SQLBase,
+ sqlvalues,
+ )
+from canonical.launchpad.interfaces.lpstorm import IStore
from lp.soyuz.interfaces.distroseriespackagecache import (
IDistroSeriesPackageCache,
)
+from lp.soyuz.model.binarypackagename import BinaryPackageName
+from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
+from lp.soyuz.model.distroarchseries import DistroArchSeries
+from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
class DistroSeriesPackageCache(SQLBase):
@@ -39,4 +50,165 @@
summaries = StringCol(notNull=False, default=None)
descriptions = StringCol(notNull=False, default=None)
-
+ @classmethod
+ def _find(cls, distroseries, archive=None):
+ """All of the cached binary package records for this distroseries.
+
+ If 'archive' is not given it will return all caches stored for the
+ distroseries main archives (PRIMARY and PARTNER).
+ """
+ if archive is not None:
+ archives = [archive.id]
+ else:
+ archives = distroseries.distribution.all_distro_archive_ids
+
+ return IStore(cls).find(
+ cls,
+ cls.distroseries == distroseries,
+ cls.archiveID.is_in(archives)).order_by(cls.name)
+
+ @classmethod
+ def removeOld(cls, distroseries, archive, log):
+ """Delete any records that are no longer applicable.
+
+ Consider all binary packages marked as REMOVED.
+
+ Also purges all existing cache records for disabled archives.
+
+ :param archive: target `IArchive`.
+ :param log: the context logger object able to print DEBUG level
+ messages.
+ """
+ # get the set of package names that should be there
+ bpns = set(BinaryPackageName.select("""
+ BinaryPackagePublishingHistory.distroarchseries =
+ DistroArchSeries.id AND
+ DistroArchSeries.distroseries = %s AND
+ Archive.id = %s AND
+ BinaryPackagePublishingHistory.archive = Archive.id AND
+ BinaryPackagePublishingHistory.binarypackagerelease =
+ BinaryPackageRelease.id AND
+ BinaryPackageRelease.binarypackagename =
+ BinaryPackageName.id AND
+ BinaryPackagePublishingHistory.dateremoved is NULL AND
+ Archive.enabled = TRUE
+ """ % sqlvalues(distroseries.id, archive.id),
+ distinct=True,
+ clauseTables=[
+ 'Archive',
+ 'DistroArchSeries',
+ 'BinaryPackagePublishingHistory',
+ 'BinaryPackageRelease']))
+
+ # remove the cache entries for binary packages we no longer want
+ for cache in cls._find(distroseries, archive):
+ if cache.binarypackagename not in bpns:
+ log.debug(
+ "Removing binary cache for '%s' (%s)"
+ % (cache.name, cache.id))
+ cache.destroySelf()
+
+ @classmethod
+ def _update(cls, distroseries, binarypackagename, archive, log):
+ """Update the package cache for a given IBinaryPackageName
+
+ 'log' is required, it should be a logger object able to print
+ DEBUG level messages.
+ 'ztm' is the current trasaction manager used for partial commits
+ (in full batches of 100 elements)
+ """
+ # get the set of published binarypackagereleases
+ bprs = IStore(BinaryPackageRelease).find(
+ BinaryPackageRelease,
+ BinaryPackageRelease.binarypackagename == binarypackagename,
+ BinaryPackageRelease.id ==
+ BinaryPackagePublishingHistory.binarypackagereleaseID,
+ BinaryPackagePublishingHistory.distroarchseriesID ==
+ DistroArchSeries.id,
+ DistroArchSeries.distroseries == distroseries,
+ BinaryPackagePublishingHistory.archive == archive,
+ BinaryPackagePublishingHistory.dateremoved == None)
+ bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated))
+ bprs = bprs.config(distinct=True)
+
+ if bprs.count() == 0:
+ log.debug("No binary releases found.")
+ return
+
+ # find or create the cache entry
+ cache = cls.selectOne("""
+ distroseries = %s AND
+ archive = %s AND
+ binarypackagename = %s
+ """ % sqlvalues(distroseries, archive, binarypackagename))
+ if cache is None:
+ log.debug("Creating new binary cache entry.")
+ cache = cls(
+ archive=archive,
+ distroseries=distroseries,
+ binarypackagename=binarypackagename)
+
+ # make sure the cached name, summary and description are correct
+ cache.name = binarypackagename.name
+ cache.summary = bprs[0].summary
+ cache.description = bprs[0].description
+
+ # get the sets of binary package summaries, descriptions. there is
+ # likely only one, but just in case...
+
+ summaries = set()
+ descriptions = set()
+ for bpr in bprs:
+ log.debug("Considering binary version %s" % bpr.version)
+ summaries.add(bpr.summary)
+ descriptions.add(bpr.description)
+
+ # and update the caches
+ cache.summaries = ' '.join(sorted(summaries))
+ cache.descriptions = ' '.join(sorted(descriptions))
+
+ @classmethod
+ def updateAll(cls, distroseries, archive, log, ztm, commit_chunk=500):
+ """Update the binary package cache
+
+ Consider all binary package names published in this distro series
+ and entirely skips updates for disabled archives
+
+ :param archive: target `IArchive`;
+ :param log: logger object for printing debug level information;
+ :param ztm: transaction used for partial commits, every chunk of
+ 'commit_chunk' updates is committed;
+ :param commit_chunk: number of updates before commit, defaults to 500.
+
+ :return: the number of packages updated.
+ """
+ # Do not create cache entries for disabled archives.
+ if not archive.enabled:
+ return
+
+ # Get the set of package names to deal with.
+ bpns = IStore(BinaryPackageName).find(
+ BinaryPackageName,
+ DistroArchSeries.distroseries == distroseries,
+ BinaryPackagePublishingHistory.distroarchseriesID ==
+ DistroArchSeries.id,
+ BinaryPackagePublishingHistory.archive == archive,
+ BinaryPackagePublishingHistory.binarypackagereleaseID ==
+ BinaryPackageRelease.id,
+ BinaryPackageRelease.binarypackagename == BinaryPackageName.id,
+ BinaryPackagePublishingHistory.dateremoved == None).config(
+ distinct=True).order_by(BinaryPackageName.name)
+
+ number_of_updates = 0
+ chunk_size = 0
+ for bpn in bpns:
+ log.debug("Considering binary '%s'" % bpn.name)
+ cls._update(distroseries, bpn, archive, log)
+ number_of_updates += 1
+ chunk_size += 1
+ if chunk_size == commit_chunk:
+ chunk_size = 0
+ log.debug("Committing")
+ ztm.commit()
+
+ return number_of_updates
=== modified file 'lib/lp/soyuz/stories/soyuz/xx-distributionsourcepackagerelease-pages.txt'
--- lib/lp/soyuz/stories/soyuz/xx-distributionsourcepackagerelease-pages.txt 2011-05-03 02:39:30 +0000
+++ lib/lp/soyuz/stories/soyuz/xx-distributionsourcepackagerelease-pages.txt 2011-08-24 06:27:35 +0000
@@ -252,14 +252,16 @@
>>> from lp.services.log.logger import BufferLogger
>>> from canonical.testing.layers import reconnect_stores
>>> from lp.registry.interfaces.distribution import IDistributionSet
+ >>> from lp.soyuz.model.distroseriespackagecache import (
+ ... DistroSeriesPackageCache)
>>> login('foo.bar@xxxxxxxxxxxxx')
>>> transaction.commit()
>>> reconnect_stores(config.statistician.dbuser)
>>> logger = BufferLogger()
>>> ubuntutest = getUtility(IDistributionSet).getByName('ubuntutest')
>>> breezy_autotest = ubuntutest.getSeries('breezy-autotest')
- >>> unused = breezy_autotest.updateCompletePackageCache(
- ... ubuntutest.main_archive, logger, transaction)
+ >>> unused = DistroSeriesPackageCache.updateAll(
+ ... breezy_autotest, ubuntutest.main_archive, logger, transaction)
>>> transaction.commit()
>>> reconnect_stores('launchpad')
>>> logout()
=== modified file 'lib/lp/soyuz/tests/test_distroseriesbinarypackage.py'
--- lib/lp/soyuz/tests/test_distroseriesbinarypackage.py 2011-05-19 14:15:19 +0000
+++ lib/lp/soyuz/tests/test_distroseriesbinarypackage.py 2011-08-24 06:27:35 +0000
@@ -6,7 +6,6 @@
__metaclass__ = type
__all__ = [
'TestDistroSeriesBinaryPackage',
- 'test_suite',
]
from testtools.matchers import (
@@ -19,6 +18,7 @@
from canonical.testing.layers import LaunchpadZopelessLayer
from lp.services.log.logger import BufferLogger
from lp.soyuz.model.distroseriesbinarypackage import DistroSeriesBinaryPackage
+from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
from lp.testing import (
StormStatementRecorder,
@@ -61,11 +61,13 @@
logger = BufferLogger()
transaction.commit()
LaunchpadZopelessLayer.switchDbUser(config.statistician.dbuser)
- self.distroseries.updatePackageCache(
- self.binary_package_name, distro_archive_1, logger)
+ DistroSeriesPackageCache._update(
+ self.distroseries, self.binary_package_name, distro_archive_1,
+ logger)
- self.distroseries.updatePackageCache(
- self.binary_package_name, distro_archive_2, logger)
+ DistroSeriesPackageCache._update(
+ self.distroseries, self.binary_package_name, distro_archive_2,
+ logger)
self.failUnlessEqual(
'Foo is the best', self.distroseries_binary_package.summary)
=== modified file 'lib/lp/soyuz/tests/test_publishing.py'
--- lib/lp/soyuz/tests/test_publishing.py 2011-08-23 14:35:43 +0000
+++ lib/lp/soyuz/tests/test_publishing.py 2011-08-24 06:27:35 +0000
@@ -54,6 +54,7 @@
)
from lp.soyuz.interfaces.queue import QueueInconsistentStateError
from lp.soyuz.interfaces.section import ISectionSet
+from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
from lp.soyuz.model.processor import ProcessorFamily
from lp.soyuz.model.publishing import (
BinaryPackagePublishingHistory,
@@ -556,7 +557,8 @@
reconnect_stores(config.statistician.dbuser)
distroseries = getUtility(IDistroSeriesSet).get(distroseries.id)
- distroseries.updateCompletePackageCache(
+ DistroSeriesPackageCache.updateAll(
+ distroseries,
archive=distroseries.distribution.main_archive,
ztm=transaction,
log=DevNullLogger())