launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #14767
[Merge] lp:~stevenk/launchpad/populate-searchables-for-pu into lp:launchpad
Steve Kowalik has proposed merging lp:~stevenk/launchpad/populate-searchables-for-pu into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #1081827 in Launchpad itself: "DistroSeries:+queue search times out due to overenthusiastic joins"
https://bugs.launchpad.net/launchpad/+bug/1081827
For more details, see:
https://code.launchpad.net/~stevenk/launchpad/populate-searchables-for-pu/+merge/138906
Populate the new searchable_names and searchable_versions columns on PackageUpload. The queries the garbo job is running are pretty hideous, but they're temporary and actually quite performant -- dogfood is averaging 1,200 rows each loop through.
LoC justification is the garbo job is temporary and will be dropped when it's done, and after that we can delete most of IPackageUploadSet.get_all() and the horrible 8 table joins it does today -- that's roughly a 140 line drop. By my calculations, we end up at -50 lines, even including the +36 from three or four database patches.
This cannot land until https://code.launchpad.net/~stevenk/launchpad/db-searchableversions-for-pu/+merge/136843 has landed and been deployed.
--
https://code.launchpad.net/~stevenk/launchpad/populate-searchables-for-pu/+merge/138906
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stevenk/launchpad/populate-searchables-for-pu into lp:launchpad.
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2012-12-03 21:56:19 +0000
+++ database/schema/security.cfg 2012-12-10 06:38:22 +0000
@@ -2253,6 +2253,7 @@
public.oauthnonce = SELECT, DELETE
public.openidconsumerassociation = SELECT, DELETE
public.openidconsumernonce = SELECT, DELETE
+public.packageupload = SELECT, UPDATE
public.person = SELECT, DELETE
public.product = SELECT, UPDATE
public.pofiletranslator = SELECT, INSERT, UPDATE, DELETE
=== modified file 'lib/lp/registry/model/distroseries.py'
--- lib/lp/registry/model/distroseries.py 2012-12-06 13:33:46 +0000
+++ lib/lp/registry/model/distroseries.py 2012-12-10 06:38:22 +0000
@@ -1339,12 +1339,18 @@
changesfilename, len(changesfilecontent),
StringIO(changesfilecontent), 'text/plain',
restricted=archive.private)
+ searchable_names = None
+ searchable_versions = None
+ if package_copy_job is not None:
+ searchable_names = package_copy_job.package_name
+ searchable_versions = [package_copy_job.package_version]
return PackageUpload(
distroseries=self, status=PackageUploadStatus.NEW,
- pocket=pocket, archive=archive,
- changesfile=changes_file_alias, signing_key=signing_key,
- package_copy_job=package_copy_job)
+ pocket=pocket, archive=archive, changesfile=changes_file_alias,
+ signing_key=signing_key, package_copy_job=package_copy_job,
+ searchable_names=searchable_names,
+ searchable_versions=searchable_versions)
def getPackageUploadQueue(self, state):
"""See `IDistroSeries`."""
=== modified file 'lib/lp/scripts/garbo.py'
--- lib/lp/scripts/garbo.py 2012-12-03 21:56:19 +0000
+++ lib/lp/scripts/garbo.py 2012-12-10 06:38:22 +0000
@@ -18,6 +18,7 @@
)
import logging
import multiprocessing
+from operator import itemgetter
import os
import threading
import time
@@ -122,6 +123,7 @@
from lp.services.verification.model.logintoken import LoginToken
from lp.soyuz.model.archive import Archive
from lp.soyuz.model.publishing import SourcePackagePublishingHistory
+from lp.soyuz.model.queue import PackageUpload
from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache
from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
from lp.translations.interfaces.potemplate import IPOTemplateSet
@@ -1364,6 +1366,154 @@
transaction.commit()
+class PopulatePackageUploadSearchableNames(TunableLoop):
+ """Populates PackageUpload.searchable_names."""
+
+ maximum_chunk_size = 5000
+
+ def __init__(self, log, abort_time=None):
+ super(PopulatePackageUploadSearchableNames, self).__init__(
+ log, abort_time)
+ self.start_at = 1
+ self.store = IMasterStore(PackageUpload)
+
+ def findPackageUploadIDs(self):
+ return self.store.find(
+ (PackageUpload.id,), PackageUpload.searchable_names == None,
+ PackageUpload.id >= self.start_at).order_by(PackageUpload.id)
+
+ def isDone(self):
+ return self.findPackageUploadIDs().is_empty()
+
+ def __call__(self, chunk_size):
+ packageupload_ids = map(
+ itemgetter(0), list(self.findPackageUploadIDs()[:chunk_size]))
+ results = self.store.find(
+ (PackageUpload.id, SQL("""
+ array_to_string((SELECT array_agg(
+ DISTINCT spn.name ORDER BY spn.name)
+ FROM
+ packageuploadbuild
+ JOIN binarypackagebuild AS bpb ON
+ bpb.id = packageuploadbuild.build
+ JOIN sourcepackagerelease AS spr ON
+ spr.id = bpb.source_package_release
+ JOIN sourcepackagename AS spn ON
+ spn.id = spr.sourcepackagename
+ WHERE packageuploadbuild.packageupload = packageupload.id
+ ) ||
+ (
+ SELECT array_agg(
+ DISTINCT bpn.name ORDER BY bpn.name)
+ FROM
+ packageuploadbuild
+ JOIN binarypackagerelease ON
+ binarypackagerelease.build = packageuploadbuild.build
+ JOIN binarypackagename AS bpn ON
+ bpn.id = binarypackagerelease.binarypackagename
+ WHERE packageuploadbuild.packageupload = packageupload.id
+ ) ||
+ (
+ SELECT array_agg(
+ DISTINCT sourcepackagename.name ORDER BY sourcepackagename.name)
+ FROM
+ packageuploadsource
+ JOIN sourcepackagerelease AS spr ON
+ spr.id = packageuploadsource.sourcepackagerelease
+ JOIN sourcepackagename ON
+ sourcepackagename.id = spr.sourcepackagename
+ WHERE packageuploadsource.packageupload = packageupload.id
+ ) ||
+ (
+ SELECT array_agg(
+ DISTINCT libraryfilealias.filename
+ ORDER BY libraryfilealias.filename)
+ FROM
+ packageuploadcustom
+ JOIN libraryfilealias ON
+ libraryfilealias.id = packageuploadcustom.libraryfilealias
+ WHERE packageuploadcustom.packageupload = packageupload.id
+ ) ||
+ (
+ SELECT package_name FROM packagecopyjob
+ WHERE packageupload.package_copy_job = packagecopyjob.id
+ ), ' ')""")), PackageUpload.id.is_in(packageupload_ids))
+ cache_data = ClassAlias(PackageUpload, "cache_data")
+ updated_columns = dict(
+ [(PackageUpload.searchable_names, cache_data.searchable_names)])
+ values = [
+ [dbify_value(col, val)[0]
+ for (col, val) in zip(
+ (PackageUpload.id, PackageUpload.searchable_names), data)]
+ for data in results]
+ cols = [('id', 'integer'), ('searchable_names', 'text')]
+ cache_data_expr = Values('cache_data', cols, values)
+ self.store.execute(
+ BulkUpdate(
+ updated_columns, table=PackageUpload, values=cache_data_expr,
+ where=PackageUpload.id == cache_data.id))
+ self.start_at = packageupload_ids[-1] + 1
+ transaction.commit()
+
+
+class PopulatePackageUploadSearchableVersions(TunableLoop):
+ """Populates PackageUpload.searchable_versions."""
+
+ maximum_chunk_size = 5000
+
+ def __init__(self, log, abort_time=None):
+ super(PopulatePackageUploadSearchableVersions, self).__init__(
+ log, abort_time)
+ self.start_at = 1
+ self.store = IMasterStore(PackageUpload)
+
+ def findPackageUploadIDs(self):
+ return self.store.find(
+ (PackageUpload.id,), PackageUpload.searchable_versions == None,
+ PackageUpload.id >= self.start_at).order_by(PackageUpload.id)
+
+ def isDone(self):
+ return self.findPackageUploadIDs().is_empty()
+
+ def __call__(self, chunk_size):
+ packageupload_ids = map(
+ itemgetter(0), list(self.findPackageUploadIDs()[:chunk_size]))
+ results = self.store.find(
+ (PackageUpload.id, SQL("""
+ (COALESCE(
+ (SELECT ARRAY[spr.version]
+ FROM packageuploadsource
+ JOIN sourcepackagerelease AS spr ON
+ spr.id = packageuploadsource.sourcepackagerelease
+ WHERE packageuploadsource.packageupload = packageupload.id),
+ ARRAY[]::debversion[]
+ ) || (
+ SELECT array_agg(DISTINCT binarypackagerelease.version)
+ FROM packageuploadbuild
+ JOIN binarypackagerelease ON
+ binarypackagerelease.build = packageuploadbuild.build
+ WHERE packageuploadbuild.packageupload = packageupload.id)
+ )::text[]
+ """)), PackageUpload.id.is_in(packageupload_ids))
+ cache_data = ClassAlias(PackageUpload, "cache_data")
+ updated_columns = dict(
+ [(PackageUpload.searchable_versions,
+ cache_data.searchable_versions)])
+ values = [
+ [dbify_value(col, val)[0]
+ for (col, val) in zip(
+ (PackageUpload.id, PackageUpload.searchable_versions), data)]
+ for data in results]
+ cols = [('id', 'integer'), ('searchable_versions', 'text[]')]
+ cache_data_expr = Values('cache_data', cols, values)
+ self.store.execute(
+ BulkUpdate(
+ updated_columns, table=PackageUpload, values=cache_data_expr,
+ where=PackageUpload.id == cache_data.id))
+ self.start_at = packageupload_ids[-1] + 1
+ transaction.commit()
+
+
class BaseDatabaseGarbageCollector(LaunchpadCronScript):
"""Abstract base class to run a collection of TunableLoops."""
script_name = None # Script name for locking and database user. Override.
@@ -1619,6 +1769,8 @@
UnusedSessionPruner,
DuplicateSessionPruner,
BugHeatUpdater,
+ PopulatePackageUploadSearchableNames,
+ PopulatePackageUploadSearchableVersions,
]
experimental_tunable_loops = []
=== modified file 'lib/lp/scripts/tests/test_garbo.py'
--- lib/lp/scripts/tests/test_garbo.py 2012-12-03 21:56:19 +0000
+++ lib/lp/scripts/tests/test_garbo.py 2012-12-10 06:38:22 +0000
@@ -1293,6 +1293,49 @@
'PopulateLatestPersonSourcePackageReleaseCache')
self.assertEqual(spph_2.id, job_data['last_spph_id'])
+ def test_PopulatePackageUploadSearchableNames(self):
+ # PopulatePackageUploadSearchableNames sets searchable_names for
+ # existing uploads correctly.
+ switch_dbuser('testadmin')
+ distroseries = self.factory.makeDistroSeries()
+ source = self.factory.makeSourcePackageUpload(distroseries)
+ binary = self.factory.makeBuildPackageUpload(distroseries)
+ custom = self.factory.makeCustomPackageUpload(distroseries)
+ # They all have searchable_names set, so unset it.
+ for kind in (source, binary, custom):
+ removeSecurityProxy(kind).searchable_names = None
+ transaction.commit()
+ self.runHourly()
+ source_name = source.sources[0].sourcepackagerelease.name
+ binary_names = '%s %s' % (
+ binary.builds[0].build.source_package_release.name,
+ binary.builds[0].build.binarypackages[0].name)
+ filename = custom.customfiles[0].libraryfilealias.filename
+ self.assertEqual(source.searchable_names, source_name)
+ self.assertEqual(binary.searchable_names, binary_names)
+ self.assertEqual(custom.searchable_names, filename)
+
+ def test_PopulatePackageUploadSearchableVersions(self):
+ # PopulatePackageUploadSearchableVersions sets searchable_versions for
+ # existing uploads correctly.
+ switch_dbuser('testadmin')
+ distroseries = self.factory.makeDistroSeries()
+ source = self.factory.makeSourcePackageUpload(distroseries)
+ binary = self.factory.makeBuildPackageUpload(distroseries)
+ build = self.factory.makeBinaryPackageBuild()
+ self.factory.makeBinaryPackageRelease(build=build)
+ binary.addBuild(build)
+ # They all have searchable_versions set, so unset it.
+ for kind in (source, binary):
+ removeSecurityProxy(kind).searchable_versions = None
+ transaction.commit()
+ self.runHourly()
+ source_version = [source.sources[0].sourcepackagerelease.version]
+ binary_versions = [
+ build.build.binarypackages[0].version for build in binary.builds]
+ self.assertContentEqual(source_version, source.searchable_versions)
+ self.assertContentEqual(binary_versions, binary.searchable_versions)
+
class TestGarboTasks(TestCaseWithFactory):
layer = LaunchpadZopelessLayer
=== modified file 'lib/lp/soyuz/configure.zcml'
--- lib/lp/soyuz/configure.zcml 2012-11-15 01:41:14 +0000
+++ lib/lp/soyuz/configure.zcml 2012-12-10 06:38:22 +0000
@@ -190,7 +190,9 @@
package_name
package_version
section_name
- components"/>
+ components
+ searchable_names
+ searchable_versions"/>
<require
permission="launchpad.Edit"
attributes="
=== modified file 'lib/lp/soyuz/interfaces/queue.py'
--- lib/lp/soyuz/interfaces/queue.py 2012-11-15 01:41:14 +0000
+++ lib/lp/soyuz/interfaces/queue.py 2012-12-10 06:38:22 +0000
@@ -1,8 +1,6 @@
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# pylint: disable-msg=E0211,E0213
-
"""Queue interfaces."""
__metaclass__ = type
@@ -212,6 +210,11 @@
sourcepackagerelease = Attribute(
"The source package release for this item")
+ searchable_names = TextLine(
+ title=_("Searchable names for this item"), readonly=True)
+ searchable_versions = List(
+ title=_("Searchable versions for this item"), readonly=True)
+
package_name = exported(
TextLine(
title=_("Name of the uploaded source package"), readonly=True),
=== modified file 'lib/lp/soyuz/model/queue.py'
--- lib/lp/soyuz/model/queue.py 2012-11-15 02:45:58 +0000
+++ lib/lp/soyuz/model/queue.py 2012-12-10 06:38:22 +0000
@@ -1,8 +1,6 @@
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# pylint: disable-msg=E0611,W0212
-
__metaclass__ = type
__all__ = [
'PackageUploadQueue',
@@ -23,6 +21,7 @@
ForeignKey,
SQLMultipleJoin,
SQLObjectNotFound,
+ StringCol,
)
from storm.expr import LeftJoin
from storm.locals import (
@@ -30,8 +29,10 @@
Desc,
Int,
Join,
+ List,
Or,
Reference,
+ Unicode,
)
from storm.store import (
EmptyResultSet,
@@ -216,30 +217,33 @@
_defaultOrder = ['id']
- status = EnumCol(dbName='status', unique=False, notNull=True,
- default=PackageUploadStatus.NEW,
- schema=PackageUploadStatus,
- storm_validator=validate_status)
+ status = EnumCol(
+ dbName='status', unique=False, notNull=True,
+ default=PackageUploadStatus.NEW, schema=PackageUploadStatus,
+ storm_validator=validate_status)
date_created = UtcDateTimeCol(notNull=False, default=UTC_NOW)
- distroseries = ForeignKey(dbName="distroseries",
- foreignKey='DistroSeries')
+ distroseries = ForeignKey(dbName="distroseries", foreignKey='DistroSeries')
- pocket = EnumCol(dbName='pocket', unique=False, notNull=True,
- schema=PackagePublishingPocket)
+ pocket = EnumCol(
+ dbName='pocket', unique=False, notNull=True,
+ schema=PackagePublishingPocket)
changesfile = ForeignKey(
dbName='changesfile', foreignKey="LibraryFileAlias", notNull=False)
archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True)
- signing_key = ForeignKey(foreignKey='GPGKey', dbName='signing_key',
- notNull=False)
+ signing_key = ForeignKey(
+ foreignKey='GPGKey', dbName='signing_key', notNull=False)
package_copy_job_id = Int(name='package_copy_job', allow_none=True)
package_copy_job = Reference(package_copy_job_id, 'PackageCopyJob.id')
+ searchable_names = StringCol(name='searchable_names')
+ searchable_versions = List(type=Unicode())
+
# XXX julian 2007-05-06:
# Sources should not be SQLMultipleJoin, there is only ever one
# of each at most.
@@ -353,15 +357,13 @@
def setNew(self):
"""See `IPackageUpload`."""
if self.status == PackageUploadStatus.NEW:
- raise QueueInconsistentStateError(
- 'Queue item already new')
+ raise QueueInconsistentStateError('Queue item already new')
self.status = PassthroughStatusValue(PackageUploadStatus.NEW)
def setUnapproved(self):
"""See `IPackageUpload`."""
if self.status == PackageUploadStatus.UNAPPROVED:
- raise QueueInconsistentStateError(
- 'Queue item already unapproved')
+ raise QueueInconsistentStateError('Queue item already unapproved')
self.status = PassthroughStatusValue(PackageUploadStatus.UNAPPROVED)
def setAccepted(self):
@@ -374,8 +376,7 @@
self.pocket.name, self.distroseries.status.name))
if self.status == PackageUploadStatus.ACCEPTED:
- raise QueueInconsistentStateError(
- 'Queue item already accepted')
+ raise QueueInconsistentStateError('Queue item already accepted')
for source in self.sources:
source.verifyBeforeAccept()
@@ -461,15 +462,13 @@
def setDone(self):
"""See `IPackageUpload`."""
if self.status == PackageUploadStatus.DONE:
- raise QueueInconsistentStateError(
- 'Queue item already done')
+ raise QueueInconsistentStateError('Queue item already done')
self.status = PassthroughStatusValue(PackageUploadStatus.DONE)
def setRejected(self):
"""See `IPackageUpload`."""
if self.status == PackageUploadStatus.REJECTED:
- raise QueueInconsistentStateError(
- 'Queue item already rejected')
+ raise QueueInconsistentStateError('Queue item already rejected')
self.status = PassthroughStatusValue(PackageUploadStatus.REJECTED)
def _closeBugs(self, changesfile_path, logger=None):
@@ -709,20 +708,17 @@
@cachedproperty
def contains_upgrader(self):
"""See `IPackageUpload`."""
- return (PackageUploadCustomFormat.DIST_UPGRADER
- in self._customFormats)
+ return PackageUploadCustomFormat.DIST_UPGRADER in self._customFormats
@cachedproperty
def contains_ddtp(self):
"""See `IPackageUpload`."""
- return (PackageUploadCustomFormat.DDTP_TARBALL
- in self._customFormats)
+ return PackageUploadCustomFormat.DDTP_TARBALL in self._customFormats
@cachedproperty
def contains_uefi(self):
"""See `IPackageUpload`."""
- return (PackageUploadCustomFormat.UEFI
- in self._customFormats)
+ return PackageUploadCustomFormat.UEFI in self._customFormats
@property
def package_name(self):
@@ -853,26 +849,43 @@
return publishing_records
+ def appendSearchableNames(self, names):
+ name = ' '.join(names)
+ if self.searchable_names:
+ self.searchable_names = '%s %s' % (self.searchable_names, name)
+ else:
+ self.searchable_names = name
+
+ def appendSearchableVersions(self, version):
+ if self.searchable_versions:
+ self.searchable_versions.append(version)
+ else:
+ self.searchable_versions = [version]
+
def addSource(self, spr):
"""See `IPackageUpload`."""
del get_property_cache(self).sources
+ self.appendSearchableNames([spr.name])
+ self.appendSearchableVersions(spr.version)
return PackageUploadSource(
- packageupload=self,
- sourcepackagerelease=spr.id)
+ packageupload=self, sourcepackagerelease=spr.id)
def addBuild(self, build):
"""See `IPackageUpload`."""
del get_property_cache(self).builds
- return PackageUploadBuild(
- packageupload=self,
- build=build.id)
+ names = [build.source_package_release.name]
+ names.extend([bpr.name for bpr in build.binarypackages])
+ self.appendSearchableNames(names)
+ versions = set([bpr.version for bpr in build.binarypackages])
+ [self.appendSearchableVersions(version) for version in versions]
+ return PackageUploadBuild(packageupload=self, build=build.id)
def addCustom(self, library_file, custom_type):
"""See `IPackageUpload`."""
del get_property_cache(self).customfiles
+ self.appendSearchableNames([library_file.filename])
return PackageUploadCustom(
- packageupload=self,
- libraryfilealias=library_file.id,
+ packageupload=self, libraryfilealias=library_file.id,
customformat=custom_type)
def isPPA(self):
@@ -1361,15 +1374,14 @@
_defaultOrder = ['id']
packageupload = ForeignKey(
- dbName='packageupload',
- foreignKey='PackageUpload')
-
- customformat = EnumCol(dbName='customformat', unique=False,
- notNull=True, schema=PackageUploadCustomFormat)
-
- libraryfilealias = ForeignKey(dbName='libraryfilealias',
- foreignKey="LibraryFileAlias",
- notNull=True)
+ dbName='packageupload', foreignKey='PackageUpload')
+
+ customformat = EnumCol(
+ dbName='customformat', unique=False, notNull=True,
+ schema=PackageUploadCustomFormat)
+
+ libraryfilealias = ForeignKey(
+ dbName='libraryfilealias', foreignKey="LibraryFileAlias", notNull=True)
def publish(self, logger=None):
"""See `IPackageUploadCustom`."""
@@ -1425,8 +1437,7 @@
"""See `IPackageUploadCustom`."""
# XXX cprov 2005-03-03: We need to use the Zope Component Lookup
# to instantiate the object in question and avoid circular imports
- from lp.archivepublisher.dist_upgrader import (
- process_dist_upgrader)
+ from lp.archivepublisher.dist_upgrader import process_dist_upgrader
self._publishCustom(process_dist_upgrader, logger=logger)
@@ -1434,8 +1445,7 @@
"""See `IPackageUploadCustom`."""
# XXX cprov 2005-03-03: We need to use the Zope Component Lookup
# to instantiate the object in question and avoid circular imports
- from lp.archivepublisher.ddtp_tarball import (
- process_ddtp_tarball)
+ from lp.archivepublisher.ddtp_tarball import process_ddtp_tarball
self._publishCustom(process_ddtp_tarball, logger=logger)
=== modified file 'lib/lp/soyuz/tests/test_packageupload.py'
--- lib/lp/soyuz/tests/test_packageupload.py 2012-11-15 01:41:14 +0000
+++ lib/lp/soyuz/tests/test_packageupload.py 2012-12-10 06:38:22 +0000
@@ -116,6 +116,8 @@
upload.addSource(spr)
self.assertEqual(spr.sourcepackagename.name, upload.package_name)
self.assertEqual(spr.version, upload.package_version)
+ self.assertEqual(spr.name, upload.searchable_names)
+ self.assertContentEqual([spr.version], upload.searchable_versions)
def test_publish_sets_packageupload(self):
# Publishing a PackageUploadSource will pass itself to the source
@@ -459,6 +461,25 @@
upload, job = self.makeUploadWithPackageCopyJob()
self.assertEqual(job.package_name, upload.package_name)
self.assertEqual(job.package_version, upload.package_version)
+ self.assertEqual(job.package_name, upload.searchable_names)
+ self.assertEqual([job.package_version], upload.searchable_versions)
+
+ def test_searchables_for_builds(self):
+ distroseries = self.factory.makeDistroSeries()
+ upload = self.factory.makeBuildPackageUpload(distroseries)
+ bpr = upload.builds[0].build.binarypackages[0]
+ names = '%s %s' % (
+ upload.builds[0].build.source_package_release.name, bpr.name)
+ self.assertEqual(upload.searchable_names, names)
+ self.assertEqual(upload.searchable_versions, [bpr.version])
+
+ def test_searchables_for_custom(self):
+ distroseries = self.factory.makeDistroSeries()
+ upload = self.factory.makeCustomPackageUpload(distroseries)
+ self.assertEqual(
+ upload.searchable_names,
+ upload.customfiles[0].libraryfilealias.filename)
+ self.assertIsNone(upload.searchable_versions)
def test_displayarchs_for_copy_job_is_sync(self):
# For copy jobs, displayarchs is "source."
=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py 2012-12-05 15:58:18 +0000
+++ lib/lp/testing/factory.py 2012-12-10 06:38:22 +0000
@@ -3434,10 +3434,10 @@
pocket=pocket)
build = self.makeBinaryPackageBuild(
source_package_release=source_package_release, pocket=pocket)
- upload.addBuild(build)
self.makeBinaryPackageRelease(
binarypackagename=binarypackagename, build=build,
component=component)
+ upload.addBuild(build)
return upload
def makeCustomPackageUpload(self, distroseries=None, archive=None,
Follow ups