launchpad-reviewers team mailing list archive: Message #15086
[Merge] lp:~wgrant/launchpad/flatten-bfj-5-app-cleanup into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/flatten-bfj-5-app-cleanup into lp:launchpad with lp:~wgrant/launchpad/flatten-bfj-4-app-eliminate as a prerequisite.
Commit message:
More app cleanup from the BFJ flattening. _new_foo -> foo, killing off PackageBuild, that sort of thing.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~wgrant/launchpad/flatten-bfj-5-app-cleanup/+merge/146349
More app cleanup from the BFJ flattening. _new_foo -> foo, killing off PackageBuild, that sort of thing.
--
https://code.launchpad.net/~wgrant/launchpad/flatten-bfj-5-app-cleanup/+merge/146349
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/flatten-bfj-5-app-cleanup into lp:launchpad.
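
Reviewer note on the shape of the change (not part of the proposal text): during the transitional flattening the concrete build tables kept their denormalised data in Storm columns named _new_*, with the public names exposed as read-only forwarding properties on BuildFarmJobMixin/PackageBuildMixin. This branch renames those columns to their public names and deletes the forwarding properties, so assignments such as self._new_status = status become self.status = status. A minimal illustrative sketch follows; SimpleBuildBefore and SimpleBuildAfter are made-up stand-ins, not classes from lp.buildmaster.

# Illustrative sketch only: hypothetical classes showing the
# _new_foo -> foo rename, not code from this branch.

class SimpleBuildBefore:
    """Transitional form: data lives in _new_* attributes."""

    def __init__(self):
        self._new_status = 'NEEDSBUILD'
        self._new_failure_count = 0

    @property
    def status(self):
        # Public name forwards to the prefixed attribute.
        return self._new_status

    @property
    def failure_count(self):
        return self._new_failure_count

    def gotFailure(self):
        self._new_failure_count += 1


class SimpleBuildAfter:
    """Flattened form: attributes use their public names directly."""

    def __init__(self):
        self.status = 'NEEDSBUILD'
        self.failure_count = 0

    def gotFailure(self):
        self.failure_count += 1


# Callers see the same public API either way.
for build in (SimpleBuildBefore(), SimpleBuildAfter()):
    build.gotFailure()
    print(build.status, build.failure_count)
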
=== modified file 'lib/lp/buildmaster/configure.zcml'
--- lib/lp/buildmaster/configure.zcml 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/configure.zcml 2013-02-04 07:27:24 +0000
@@ -64,19 +64,6 @@
interface="lp.buildmaster.interfaces.buildfarmjob.IBuildFarmJobSet" />
</securedutility>
- <!-- PackageBuild -->
- <class
- class="lp.buildmaster.model.packagebuild.PackageBuild">
- <allow
- interface="lp.buildmaster.interfaces.packagebuild.IPackageBuildDB" />
- </class>
- <securedutility
- component="lp.buildmaster.model.packagebuild.PackageBuild"
- provides="lp.buildmaster.interfaces.packagebuild.IPackageBuildSource">
- <allow
- interface="lp.buildmaster.interfaces.packagebuild.IPackageBuildSource" />
- </securedutility>
-
<!-- BuildQueue -->
<class
class="lp.buildmaster.model.buildqueue.BuildQueue">
=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
--- lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-04 07:27:24 +0000
@@ -5,16 +5,12 @@
__metaclass__ = type
__all__ = [
'IPackageBuild',
- 'IPackageBuildSource',
]
from lazr.restful.declarations import exported
from lazr.restful.fields import Reference
-from zope.interface import (
- Attribute,
- Interface,
- )
+from zope.interface import Attribute
from zope.schema import (
Choice,
Object,
@@ -30,19 +26,6 @@
from lp.soyuz.interfaces.archive import IArchive
-class IPackageBuildDB(Interface):
- """Operations on a `PackageBuild` DB row.
-
- This is deprecated while it's flattened into the concrete implementations.
- """
-
- id = Attribute('The package build ID.')
-
- build_farm_job = Reference(
- title=_('Build farm job'), schema=IBuildFarmJob, required=True,
- readonly=True, description=_('The base build farm job.'))
-
-
class IPackageBuild(IBuildFarmJob):
"""Attributes and operations specific to package build jobs."""
@@ -122,15 +105,3 @@
:param changes: Changes file from the upload.
"""
-
-
-class IPackageBuildSource(Interface):
- """A utility of this interface used to create _things_."""
-
- def new(build_farm_job, archive, pocket):
- """Create a new `IPackageBuild`.
-
- :param build_farm_job: An `IBuildFarmJob`.
- :param archive: An `IArchive`.
- :param pocket: An item of `PackagePublishingPocket`.
- """
=== modified file 'lib/lp/buildmaster/model/buildfarmjob.py'
--- lib/lp/buildmaster/model/buildfarmjob.py 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/model/buildfarmjob.py 2013-02-04 07:27:24 +0000
@@ -195,46 +195,6 @@
class BuildFarmJobMixin:
@property
- def processor(self):
- return self._new_processor
-
- @property
- def virtualized(self):
- return self._new_virtualized
-
- @property
- def date_created(self):
- return self._new_date_created
-
- @property
- def date_started(self):
- return self._new_date_started
-
- @property
- def date_finished(self):
- return self._new_date_finished
-
- @property
- def date_first_dispatched(self):
- return self._new_date_first_dispatched
-
- @property
- def builder(self):
- return self._new_builder
-
- @property
- def status(self):
- return self._new_status
-
- @property
- def log(self):
- return self._new_log
-
- @property
- def failure_count(self):
- return self._new_failure_count
-
- @property
def dependencies(self):
return None
@@ -289,29 +249,28 @@
def setLog(self, log):
"""See `IBuildFarmJob`."""
- self._new_log = log
+ self.log = log
def updateStatus(self, status, builder=None, slave_status=None,
date_started=None, date_finished=None):
"""See `IBuildFarmJob`."""
- self.build_farm_job.status = self._new_status = status
+ self.build_farm_job.status = self.status = status
# If there's a builder provided, set it if we don't already have
# one, or otherwise crash if it's different from the one we
# expected.
if builder is not None:
if self.builder is None:
- self.build_farm_job.builder = self._new_builder = builder
+ self.build_farm_job.builder = self.builder = builder
else:
assert self.builder == builder
# If we're starting to build, set date_started and
# date_first_dispatched if required.
if self.date_started is None and status == BuildStatus.BUILDING:
- self._new_date_started = (
- date_started or datetime.datetime.now(pytz.UTC))
+ self.date_started = date_started or datetime.datetime.now(pytz.UTC)
if self.date_first_dispatched is None:
- self._new_date_first_dispatched = self.date_started
+ self.date_first_dispatched = self.date_started
# If we're in a final build state (or UPLOADING, which sort of
# is), set date_finished if date_started is.
@@ -322,7 +281,7 @@
# XXX cprov 20060615 bug=120584: Currently buildduration includes
# the scanner latency, it should really be asking the slave for
# the duration spent building locally.
- self.build_farm_job.date_finished = self._new_date_finished = (
+ self.build_farm_job.date_finished = self.date_finished = (
date_finished or datetime.datetime.now(pytz.UTC))
def gotFailure(self):
@@ -331,7 +290,7 @@
<<<<<<< TREE
self.build_farm_job.failure_count += 1
self._new_failure_count = self.build_farm_job.failure_count
=======
- self._new_failure_count += 1
+ self.failure_count += 1
>>>>>>> MERGE-SOURCE
=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
--- lib/lp/buildmaster/model/packagebuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/model/packagebuild.py 2013-02-04 07:27:24 +0000
@@ -3,45 +3,18 @@
__metaclass__ = type
__all__ = [
- 'PackageBuild',
'PackageBuildMixin',
]
from cStringIO import StringIO
-from storm.expr import Desc
-from storm.locals import (
- Int,
- Reference,
- Store,
- Storm,
- Unicode,
- )
+from storm.locals import Store
from zope.component import getUtility
-from zope.interface import (
- classProvides,
- implements,
- )
from lp.buildmaster.enums import BuildStatus
-from lp.buildmaster.interfaces.packagebuild import (
- IPackageBuild,
- IPackageBuildSource,
- )
-from lp.buildmaster.model.buildfarmjob import (
- BuildFarmJob,
- BuildFarmJobMixin,
- )
+from lp.buildmaster.model.buildfarmjob import BuildFarmJobMixin
from lp.buildmaster.model.buildqueue import BuildQueue
-from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import (
- DEFAULT_FLAVOR,
- IStoreSelector,
- MAIN_STORE,
- )
-from lp.services.database.lpstorm import IMasterStore
from lp.services.helpers import filenameToContentType
from lp.services.librarian.browser import ProxiedLibraryFileAlias
from lp.services.librarian.interfaces import ILibraryFileAliasSet
@@ -51,78 +24,9 @@
from lp.soyuz.interfaces.component import IComponentSet
-class PackageBuild(Storm):
- """An implementation of `IBuildFarmJob` for package builds."""
-
- __storm_table__ = 'PackageBuild'
-
- implements(IPackageBuild)
- classProvides(IPackageBuildSource)
-
- id = Int(primary=True)
-
- archive_id = Int(name='archive', allow_none=False)
- archive = Reference(archive_id, 'Archive.id')
-
- pocket = DBEnum(
- name='pocket', allow_none=False,
- enum=PackagePublishingPocket)
-
- upload_log_id = Int(name='upload_log', allow_none=True)
- upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')
-
- dependencies = Unicode(name='dependencies', allow_none=True)
-
- build_farm_job_id = Int(name='build_farm_job', allow_none=False)
- build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
-
- # The following two properties are part of the IPackageBuild
- # interface, but need to be provided by derived classes.
- distribution = None
- distro_series = None
-
- def __init__(self, build_farm_job, archive, pocket, dependencies=None):
- """Construct a PackageBuild."""
- super(PackageBuild, self).__init__()
- self.build_farm_job = build_farm_job
- self.archive = archive
- self.pocket = pocket
- self.dependencies = dependencies
-
- @classmethod
- def new(cls, build_farm_job, archive, pocket):
- """See `IPackageBuildSource`."""
- store = IMasterStore(PackageBuild)
- package_build = cls(build_farm_job, archive, pocket)
- store.add(package_build)
- return package_build
-
- def destroySelf(self):
- build_farm_job = self.build_farm_job
- store = Store.of(self)
- store.remove(self)
- store.remove(build_farm_job)
-
-
class PackageBuildMixin(BuildFarmJobMixin):
@property
- def archive(self):
- return self._new_archive
-
- @property
- def pocket(self):
- return self._new_pocket
-
- @property
- def upload_log(self):
- return self._new_upload_log
-
- @property
- def dependencies(self):
- return self._new_dependencies
-
- @property
def current_component(self):
"""See `IPackageBuild`."""
return getUtility(IComponentSet)[default_component_dependency_name]
@@ -158,9 +62,9 @@
if (status == BuildStatus.MANUALDEPWAIT and slave_status is not None
and slave_status.get('dependencies') is not None):
- self._new_dependencies = unicode(slave_status.get('dependencies'))
+ self.dependencies = unicode(slave_status.get('dependencies'))
else:
- self._new_dependencies = None
+ self.dependencies = None
def verifySuccessfulUpload(self):
"""See `IPackageBuild`."""
@@ -196,7 +100,7 @@
"""See `IPackageBuild`."""
filename = "upload_%s_log.txt" % self.id
library_file = self.createUploadLog(content, filename=filename)
- self._new_upload_log = library_file
+ self.upload_log = library_file
def notify(self, extra_info=None):
"""See `IPackageBuild`."""
=== modified file 'lib/lp/buildmaster/tests/test_manager.py'
--- lib/lp/buildmaster/tests/test_manager.py 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/tests/test_manager.py 2013-02-04 07:27:24 +0000
@@ -322,7 +322,7 @@
builder.failure_count = builder_count
naked_job = removeSecurityProxy(builder.currentjob.specific_job)
- naked_job.build._new_failure_count = job_count
+ naked_job.build.failure_count = job_count
# The _scanFailed() calls abort, so make sure our existing
# failure counts are persisted.
self.layer.txn.commit()
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py 2013-02-04 07:27:24 +0000
@@ -8,22 +8,10 @@
import hashlib
from storm.store import Store
-from zope.component import getUtility
from zope.security.management import checkPermission
-from zope.security.proxy import removeSecurityProxy
-from lp.buildmaster.enums import (
- BuildFarmJobType,
- BuildStatus,
- )
-from lp.buildmaster.interfaces.buildfarmjob import IBuildFarmJobSource
-from lp.buildmaster.interfaces.packagebuild import (
- IPackageBuild,
- IPackageBuildSource,
- )
-from lp.buildmaster.model.buildfarmjob import BuildFarmJob
-from lp.buildmaster.model.packagebuild import PackageBuild
-from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.buildmaster.enums import BuildStatus
+from lp.buildmaster.interfaces.packagebuild import IPackageBuild
from lp.testing import (
login,
login_person,
@@ -32,65 +20,6 @@
from lp.testing.layers import LaunchpadFunctionalLayer
-class TestPackageBuildBase(TestCaseWithFactory):
- """Provide a factory method for creating PackageBuilds.
-
- This is not included in the launchpad test factory because
- only classes deriving from PackageBuild should be used.
- """
-
- def makePackageBuild(
- self, archive=None, job_type=BuildFarmJobType.PACKAGEBUILD,
- status=BuildStatus.NEEDSBUILD,
- pocket=PackagePublishingPocket.RELEASE):
- if archive is None:
- archive = self.factory.makeArchive()
-
- bfj = getUtility(IBuildFarmJobSource).new(job_type, status=status)
- return getUtility(IPackageBuildSource).new(bfj, archive, pocket)
-
-
-class TestPackageBuild(TestPackageBuildBase):
- """Tests for the package build object."""
-
- layer = LaunchpadFunctionalLayer
-
- def setUp(self):
- """Create a package build with which to test."""
- super(TestPackageBuild, self).setUp()
- joe = self.factory.makePerson(name="joe")
- joes_ppa = self.factory.makeArchive(owner=joe, name="ppa")
- self.package_build = self.makePackageBuild(archive=joes_ppa)
-
- def test_saves_record(self):
- # A package build can be stored in the database.
- store = Store.of(self.package_build)
- store.flush()
- retrieved_build = store.find(
- PackageBuild,
- PackageBuild.id == self.package_build.id).one()
- self.assertEqual(self.package_build, retrieved_build)
-
- def test_default_values(self):
- # PackageBuild has a number of default values.
- pb = removeSecurityProxy(self.package_build)
- self.failUnlessEqual(None, pb.distribution)
- self.failUnlessEqual(None, pb.distro_series)
-
- def test_destroySelf_removes_BuildFarmJob(self):
- # Destroying a packagebuild also destroys the BuildFarmJob it
- # references.
- naked_build = removeSecurityProxy(self.package_build)
- store = Store.of(self.package_build)
- # Ensure build_farm_job_id is set.
- store.flush()
- build_farm_job_id = naked_build.build_farm_job_id
- naked_build.destroySelf()
- result = store.find(
- BuildFarmJob, BuildFarmJob.id == build_farm_job_id)
- self.assertIs(None, result.one())
-
-
class TestPackageBuildMixin(TestCaseWithFactory):
"""Test methods provided by PackageBuildMixin."""
=== modified file 'lib/lp/code/model/sourcepackagerecipe.py'
--- lib/lp/code/model/sourcepackagerecipe.py 2013-02-04 07:27:23 +0000
+++ lib/lp/code/model/sourcepackagerecipe.py 2013-02-04 07:27:24 +0000
@@ -221,15 +221,15 @@
SourcePackageRecipeBuild,
And(SourcePackageRecipeBuild.recipe_id ==
SourcePackageRecipe.id,
- SourcePackageRecipeBuild._new_archive_id ==
+ SourcePackageRecipeBuild.archive_id ==
SourcePackageRecipe.daily_build_archive_id,
- SourcePackageRecipeBuild._new_date_created > one_day_ago)),
+ SourcePackageRecipeBuild.date_created > one_day_ago)),
)
return IStore(SourcePackageRecipe).using(*joins).find(
SourcePackageRecipe,
SourcePackageRecipe.is_stale == True,
SourcePackageRecipe.build_daily == True,
- SourcePackageRecipeBuild._new_date_created == None,
+ SourcePackageRecipeBuild.date_created == None,
).config(distinct=True)
@staticmethod
@@ -286,8 +286,8 @@
pending = IStore(self).find(SourcePackageRecipeBuild,
SourcePackageRecipeBuild.recipe_id == self.id,
SourcePackageRecipeBuild.distroseries_id == distroseries.id,
- SourcePackageRecipeBuild._new_archive_id == archive.id,
- SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD)
+ SourcePackageRecipeBuild.archive_id == archive.id,
+ SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
if pending.any() is not None:
raise BuildAlreadyPending(self, distroseries)
@@ -321,9 +321,9 @@
"""See `ISourcePackageRecipe`."""
order_by = (
Desc(Greatest(
- SourcePackageRecipeBuild._new_date_started,
- SourcePackageRecipeBuild._new_date_finished)),
- Desc(SourcePackageRecipeBuild._new_date_created),
+ SourcePackageRecipeBuild.date_started,
+ SourcePackageRecipeBuild.date_finished)),
+ Desc(SourcePackageRecipeBuild.date_created),
Desc(SourcePackageRecipeBuild.id))
return self._getBuilds(None, order_by)
@@ -331,11 +331,11 @@
def completed_builds(self):
"""See `ISourcePackageRecipe`."""
filter_term = (
- SourcePackageRecipeBuild._new_status != BuildStatus.NEEDSBUILD)
+ SourcePackageRecipeBuild.status != BuildStatus.NEEDSBUILD)
order_by = (
Desc(Greatest(
- SourcePackageRecipeBuild._new_date_started,
- SourcePackageRecipeBuild._new_date_finished)),
+ SourcePackageRecipeBuild.date_started,
+ SourcePackageRecipeBuild.date_finished)),
Desc(SourcePackageRecipeBuild.id))
return self._getBuilds(filter_term, order_by)
@@ -343,7 +343,7 @@
def pending_builds(self):
"""See `ISourcePackageRecipe`."""
filter_term = (
- SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD)
+ SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
# We want to order by date_created but this is the same as ordering
# by id (since id increases monotonically) and is less expensive.
order_by = Desc(SourcePackageRecipeBuild.id)
@@ -353,7 +353,7 @@
"""The actual query to get the builds."""
query_args = [
SourcePackageRecipeBuild.recipe == self,
- SourcePackageRecipeBuild._new_archive_id == Archive.id,
+ SourcePackageRecipeBuild.archive_id == Archive.id,
Archive._enabled == True,
]
if filter_term is not None:
@@ -377,7 +377,7 @@
def last_build(self):
"""See `ISourcePackageRecipeBuild`."""
return self._getBuilds(
- True, Desc(SourcePackageRecipeBuild._new_date_finished)).first()
+ True, Desc(SourcePackageRecipeBuild.date_finished)).first()
def getMedianBuildDuration(self):
"""Return the median duration of builds of this recipe."""
@@ -385,7 +385,7 @@
result = store.find(
SourcePackageRecipeBuild,
SourcePackageRecipeBuild.recipe == self.id,
- SourcePackageRecipeBuild._new_date_finished != None)
+ SourcePackageRecipeBuild.date_finished != None)
durations = [
build.date_finished - build.date_started for build in result]
if len(durations) == 0:
=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
--- lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-04 07:27:24 +0000
@@ -90,6 +90,9 @@
id = Int(primary=True)
+ build_farm_job_id = Int(name='build_farm_job', allow_none=False)
+ build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)
+
@property
def binary_builds(self):
"""See `ISourcePackageRecipeBuild`."""
@@ -107,10 +110,16 @@
assert component is not None
return component
+ archive_id = Int(name='archive', allow_none=False)
+ archive = Reference(archive_id, 'Archive.id')
+
distroseries_id = Int(name='distroseries', allow_none=True)
distroseries = Reference(distroseries_id, 'DistroSeries.id')
distro_series = distroseries
+ pocket = DBEnum(
+ name='pocket', enum=PackagePublishingPocket, allow_none=False)
+
@property
def distribution(self):
"""See `IPackageBuild`."""
@@ -121,6 +130,35 @@
recipe_id = Int(name='recipe')
recipe = Reference(recipe_id, 'SourcePackageRecipe.id')
+ requester_id = Int(name='requester', allow_none=False)
+ requester = Reference(requester_id, 'Person.id')
+
+ upload_log_id = Int(name='upload_log')
+ upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')
+
+ dependencies = Unicode(name='dependencies')
+
+ processor_id = Int(name='processor')
+ processor = Reference(processor_id, 'Processor.id')
+ virtualized = Bool(name='virtualized')
+
+ date_created = DateTime(
+ name='date_created', tzinfo=pytz.UTC, allow_none=False)
+ date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
+ date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
+ date_first_dispatched = DateTime(
+ name='date_first_dispatched', tzinfo=pytz.UTC)
+
+ builder_id = Int(name='builder')
+ builder = Reference(builder_id, 'Builder.id')
+
+ status = DBEnum(name='status', enum=BuildStatus, allow_none=False)
+
+ log_id = Int(name='log')
+ log = Reference(log_id, 'LibraryFileAlias.id')
+
+ failure_count = Int(name='failure_count', allow_none=False)
+
manifest = Reference(
id, 'SourcePackageRecipeData.sourcepackage_recipe_build_id',
on_remote=True)
@@ -140,48 +178,6 @@
return None
return str(self.manifest.getRecipe())
- requester_id = Int(name='requester', allow_none=False)
- requester = Reference(requester_id, 'Person.id')
-
- # Migrating from PackageBuild
- build_farm_job_id = Int(name='build_farm_job')
- build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)
-
- _new_archive_id = Int(name='archive')
- _new_archive = Reference(_new_archive_id, 'Archive.id')
-
- _new_pocket = DBEnum(name='pocket', enum=PackagePublishingPocket)
-
- _new_upload_log_id = Int(name='upload_log')
- _new_upload_log = Reference(_new_upload_log_id, 'LibraryFileAlias.id')
-
- _new_dependencies = Unicode(name='dependencies')
-
- # Migrating from BuildFarmJob.
- _new_processor_id = Int(name='processor')
- _new_processor = Reference(_new_processor_id, 'Processor.id')
-
- _new_virtualized = Bool(name='virtualized')
-
- _new_date_created = DateTime(name='date_created', tzinfo=pytz.UTC)
-
- _new_date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
-
- _new_date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
-
- _new_date_first_dispatched = DateTime(
- name='date_first_dispatched', tzinfo=pytz.UTC)
-
- _new_builder_id = Int(name='builder')
- _new_builder = Reference(_new_builder_id, 'Builder.id')
-
- _new_status = DBEnum(name='status', enum=BuildStatus)
-
- _new_log_id = Int(name='log')
- _new_log = Reference(_new_log_id, 'LibraryFileAlias.id')
-
- _new_failure_count = Int(name='failure_count')
-
@property
def buildqueue_record(self):
"""See `IBuildFarmJob`."""
@@ -214,12 +210,12 @@
self.distroseries = distroseries
self.recipe = recipe
self.requester = requester
- self._new_archive = archive
- self._new_pocket = pocket
- self._new_status = BuildStatus.NEEDSBUILD
- self._new_virtualized = True
+ self.archive = archive
+ self.pocket = pocket
+ self.status = BuildStatus.NEEDSBUILD
+ self.virtualized = True
if date_created is not None:
- self._new_date_created = date_created
+ self.date_created = date_created
@classmethod
def new(cls, distroseries, recipe, requester, archive, pocket=None,
@@ -325,8 +321,8 @@
# Circular imports.
from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
from lp.services.librarian.model import LibraryFileAlias
- load_related(LibraryFileAlias, builds, ['_new_log_id'])
- archives = load_related(Archive, builds, ['_new_archive_id'])
+ load_related(LibraryFileAlias, builds, ['log_id'])
+ archives = load_related(Archive, builds, ['archive_id'])
load_related(Person, archives, ['ownerID'])
sprs = load_related(SourcePackageRecipe, builds, ['recipe_id'])
SourcePackageRecipe.preLoadDataForSourcePackageRecipes(sprs)
@@ -349,7 +345,7 @@
old_threshold = _now - timedelta(days=1)
return store.find(cls, cls.distroseries_id == distroseries.id,
cls.requester_id == requester.id, cls.recipe_id == recipe.id,
- cls._new_date_created > old_threshold)
+ cls.date_created > old_threshold)
def makeJob(self):
"""See `ISourcePackageRecipeBuildJob`."""
=== modified file 'lib/lp/soyuz/doc/sourcepackagerelease.txt'
--- lib/lp/soyuz/doc/sourcepackagerelease.txt 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/doc/sourcepackagerelease.txt 2013-02-04 07:27:24 +0000
@@ -67,7 +67,7 @@
>>> ff_ppa_build = Store.of(cprov_ppa).find(
... BinaryPackageBuild,
... BinaryPackageBuild.source_package_release == spr,
- ... BinaryPackageBuild._new_archive == cprov_ppa)
+ ... BinaryPackageBuild.archive == cprov_ppa)
>>> ff_ppa_build.count()
1
>>> ff_ppa_build[0].archive.purpose.name
=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/model/archive.py 2013-02-04 07:27:24 +0000
@@ -1118,17 +1118,13 @@
extra_exprs = []
if not include_needsbuild:
extra_exprs.append(
- BinaryPackageBuild._new_status != BuildStatus.NEEDSBUILD)
+ BinaryPackageBuild.status != BuildStatus.NEEDSBUILD)
- find_spec = (
- BinaryPackageBuild._new_status,
- Count(BinaryPackageBuild.id),
- )
result = store.find(
- find_spec,
- BinaryPackageBuild._new_archive == self,
- *extra_exprs).group_by(BinaryPackageBuild._new_status).order_by(
- BinaryPackageBuild._new_status)
+ (BinaryPackageBuild.status, Count(BinaryPackageBuild.id)),
+ BinaryPackageBuild.archive == self,
+ *extra_exprs).group_by(BinaryPackageBuild.status).order_by(
+ BinaryPackageBuild.status)
# Create a map for each count summary to a number of buildstates:
count_map = {
@@ -1898,12 +1894,12 @@
sprs_building = store.find(
BinaryPackageBuild.source_package_release_id,
- BinaryPackageBuild._new_archive == self,
- BinaryPackageBuild._new_status == BuildStatus.BUILDING)
+ BinaryPackageBuild.archive == self,
+ BinaryPackageBuild.status == BuildStatus.BUILDING)
sprs_waiting = store.find(
BinaryPackageBuild.source_package_release_id,
- BinaryPackageBuild._new_archive == self,
- BinaryPackageBuild._new_status == BuildStatus.NEEDSBUILD)
+ BinaryPackageBuild.archive == self,
+ BinaryPackageBuild.status == BuildStatus.NEEDSBUILD)
# A package is not counted as waiting if it already has at least
# one build building.
@@ -1918,13 +1914,13 @@
extra_exprs = []
if build_status is not None:
- extra_exprs = [BinaryPackageBuild._new_status == build_status]
+ extra_exprs = [BinaryPackageBuild.status == build_status]
result_set = store.find(
SourcePackageRelease,
(BinaryPackageBuild.source_package_release_id ==
SourcePackageRelease.id),
- BinaryPackageBuild._new_archive == self,
+ BinaryPackageBuild.archive == self,
*extra_exprs)
result_set.config(distinct=True).order_by(SourcePackageRelease.id)
@@ -2399,12 +2395,12 @@
def getBuildCountersForArchitecture(self, archive, distroarchseries):
"""See `IArchiveSet`."""
result = IStore(BinaryPackageBuild).find(
- (BinaryPackageBuild._new_status, Count(BinaryPackageBuild.id)),
- BinaryPackageBuild._new_archive == archive,
+ (BinaryPackageBuild.status, Count(BinaryPackageBuild.id)),
+ BinaryPackageBuild.archive == archive,
BinaryPackageBuild.distro_arch_series == distroarchseries,
).group_by(
- BinaryPackageBuild._new_status
- ).order_by(BinaryPackageBuild._new_status)
+ BinaryPackageBuild.status
+ ).order_by(BinaryPackageBuild.status)
status_map = {
'failed': (
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py 2013-02-04 07:27:24 +0000
@@ -103,6 +103,9 @@
build_farm_job_type = BuildFarmJobType.PACKAGEBUILD
job_type = build_farm_job_type
+ build_farm_job_id = Int(name='build_farm_job')
+ build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)
+
distro_arch_series_id = Int(name='distro_arch_series', allow_none=False)
distro_arch_series = Reference(
distro_arch_series_id, 'DistroArchSeries.id')
@@ -111,56 +114,49 @@
source_package_release = Reference(
source_package_release_id, 'SourcePackageRelease.id')
- # Migrating from PackageBuild
- build_farm_job_id = Int(name='build_farm_job')
- build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)
-
- _new_archive_id = Int(name='archive')
- _new_archive = Reference(_new_archive_id, 'Archive.id')
-
- _new_pocket = DBEnum(name='pocket', enum=PackagePublishingPocket)
-
- _new_upload_log_id = Int(name='upload_log')
- _new_upload_log = Reference(_new_upload_log_id, 'LibraryFileAlias.id')
-
- _new_dependencies = Unicode(name='dependencies')
-
- # Migrating from BuildFarmJob.
- _new_processor_id = Int(name='processor')
- _new_processor = Reference(_new_processor_id, 'Processor.id')
-
- _new_virtualized = Bool(name='virtualized')
-
- _new_date_created = DateTime(name='date_created', tzinfo=pytz.UTC)
-
- _new_date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
-
- _new_date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
-
- _new_date_first_dispatched = DateTime(
+ archive_id = Int(name='archive', allow_none=False)
+ archive = Reference(archive_id, 'Archive.id')
+
+ pocket = DBEnum(
+ name='pocket', enum=PackagePublishingPocket, allow_none=False)
+
+ upload_log_id = Int(name='upload_log')
+ upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')
+
+ dependencies = Unicode(name='dependencies')
+
+ processor_id = Int(name='processor')
+ processor = Reference(processor_id, 'Processor.id')
+ virtualized = Bool(name='virtualized')
+
+ date_created = DateTime(
+ name='date_created', tzinfo=pytz.UTC, allow_none=False)
+ date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
+ date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
+ date_first_dispatched = DateTime(
name='date_first_dispatched', tzinfo=pytz.UTC)
- _new_builder_id = Int(name='builder')
- _new_builder = Reference(_new_builder_id, 'Builder.id')
-
- _new_status = DBEnum(name='status', enum=BuildStatus)
-
- _new_log_id = Int(name='log')
- _new_log = Reference(_new_log_id, 'LibraryFileAlias.id')
-
- _new_failure_count = Int(name='failure_count')
-
- _new_distribution_id = Int(name='distribution')
- _new_distribution = Reference(_new_distribution_id, 'Distribution.id')
-
- _new_distro_series_id = Int(name='distro_series')
- _new_distro_series = Reference(_new_distro_series_id, 'DistroSeries.id')
-
- _new_is_distro_archive = Bool(name='is_distro_archive')
-
- _new_source_package_name_id = Int(name='source_package_name')
- _new_source_package_name = Reference(
- _new_source_package_name_id, 'SourcePackageName.id')
+ builder_id = Int(name='builder')
+ builder = Reference(builder_id, 'Builder.id')
+
+ status = DBEnum(name='status', enum=BuildStatus, allow_none=False)
+
+ log_id = Int(name='log')
+ log = Reference(log_id, 'LibraryFileAlias.id')
+
+ failure_count = Int(name='failure_count', allow_none=False)
+
+ distribution_id = Int(name='distribution', allow_none=False)
+ distribution = Reference(distribution_id, 'Distribution.id')
+
+ distro_series_id = Int(name='distro_series', allow_none=False)
+ distro_series = Reference(distro_series_id, 'DistroSeries.id')
+
+ is_distro_archive = Bool(name='is_distro_archive', allow_none=False)
+
+ source_package_name_id = Int(name='source_package_name', allow_none=False)
+ source_package_name = Reference(
+ source_package_name_id, 'SourcePackageName.id')
@property
def buildqueue_record(self):
@@ -249,16 +245,6 @@
return DecoratedResultSet(results, operator.itemgetter(0)).one()
@property
- def distro_series(self):
- """See `IBuild`"""
- return self.distro_arch_series.distroseries
-
- @property
- def distribution(self):
- """See `IBuild`"""
- return self.distro_series.distribution
-
- @property
def is_virtualized(self):
"""See `IBuild`"""
return self.archive.require_virtualized
@@ -409,18 +395,22 @@
def retry(self):
"""See `IBuild`."""
assert self.can_be_retried, "Build %s cannot be retried" % self.id
+<<<<<<< TREE
self.build_farm_job.status = self._new_status = BuildStatus.NEEDSBUILD
self.build_farm_job.date_finished = self._new_date_finished = None
- self._new_date_started = None
+ self.build_farm_job.date_started = self._new_date_started = None
self.build_farm_job.builder = self._new_builder = None
-<<<<<<< TREE
self.build_farm_job.log = self._new_log = None
self.package_build.upload_log = self._new_upload_log = None
self.package_build.dependencies = self._new_dependencies = None
=======
- self._new_log = None
- self._new_upload_log = None
- self._new_dependencies = None
+ self.build_farm_job.status = self.status = BuildStatus.NEEDSBUILD
+ self.build_farm_job.date_finished = self.date_finished = None
+ self.date_started = None
+ self.build_farm_job.builder = self.builder = None
+ self.log = None
+ self.upload_log = None
+ self.dependencies = None
>>>>>>> MERGE-SOURCE
self.queueBuild()
@@ -571,7 +561,7 @@
if not self._isDependencySatisfied(token)]
# Update dependencies line
- self._new_dependencies = u", ".join(remaining_deps)
+ self.dependencies = u", ".join(remaining_deps)
def __getitem__(self, name):
return self.getBinaryPackageRelease(name)
@@ -875,15 +865,14 @@
build_farm_job=build_farm_job,
distro_arch_series=distro_arch_series,
source_package_release=source_package_release,
- _new_archive=archive, _new_pocket=pocket,
- _new_status=status, _new_processor=processor,
- _new_virtualized=archive.require_virtualized,
- _new_builder=builder, _new_is_distro_archive=archive.is_main,
- _new_distribution=distro_arch_series.distroseries.distribution,
- _new_distro_series=distro_arch_series.distroseries,
- _new_source_package_name=source_package_release.sourcepackagename)
+ archive=archive, pocket=pocket, status=status, processor=processor,
+ virtualized=archive.require_virtualized, builder=builder,
+ is_distro_archive=archive.is_main,
+ distribution=distro_arch_series.distroseries.distribution,
+ distro_series=distro_arch_series.distroseries,
+ source_package_name=source_package_release.sourcepackagename)
if date_created is not None:
- binary_package_build._new_date_created = date_created
+ binary_package_build.date_created = date_created
return binary_package_build
def getBuildBySRAndArchtag(self, sourcepackagereleaseID, archtag):
@@ -919,7 +908,7 @@
self._prefetchBuildData(builds)
distro_arch_series = load_related(
DistroArchSeries, builds, ['distro_arch_series_id'])
- archives = load_related(Archive, builds, ['_new_archive_id'])
+ archives = load_related(Archive, builds, ['archive_id'])
load_related(Person, archives, ['ownerID'])
distroseries = load_related(
DistroSeries, distro_arch_series, ['distroseriesID'])
@@ -966,13 +955,13 @@
# Add query clause that filters on build state if the latter is
# provided.
if status is not None:
- clauses.append(BinaryPackageBuild._new_status == status)
+ clauses.append(BinaryPackageBuild.status == status)
# Add query clause that filters on pocket if the latter is provided.
if pocket:
if not isinstance(pocket, (list, tuple)):
pocket = (pocket,)
- clauses.append(BinaryPackageBuild._new_pocket.is_in(pocket))
+ clauses.append(BinaryPackageBuild.pocket.is_in(pocket))
# Add query clause that filters on architecture tag if provided.
if arch_tag is not None:
@@ -1005,8 +994,8 @@
Archive, get_archive_privacy_filter)
clauses = [
- BinaryPackageBuild._new_archive_id == Archive.id,
- BinaryPackageBuild._new_builder_id == builder_id,
+ BinaryPackageBuild.archive_id == Archive.id,
+ BinaryPackageBuild.builder_id == builder_id,
get_archive_privacy_filter(user)]
origin = [Archive]
@@ -1015,13 +1004,13 @@
return IStore(BinaryPackageBuild).using(*origin).find(
BinaryPackageBuild, *clauses).order_by(
- Desc(BinaryPackageBuild._new_date_finished),
+ Desc(BinaryPackageBuild.date_finished),
BinaryPackageBuild.id)
def getBuildsForArchive(self, archive, status=None, name=None,
pocket=None, arch_tag=None):
"""See `IBinaryPackageBuildSet`."""
- clauses = [BinaryPackageBuild._new_archive_id == archive.id]
+ clauses = [BinaryPackageBuild.archive_id == archive.id]
origin = []
self.handleOptionalParamsForBuildQueries(
@@ -1032,9 +1021,9 @@
# * FULLYBUILT & FAILURES by -datebuilt
# It should present the builds in a more natural order.
if status == BuildStatus.SUPERSEDED or status is None:
- orderBy = [Desc(BinaryPackageBuild._new_date_created)]
+ orderBy = [Desc(BinaryPackageBuild.date_created)]
else:
- orderBy = [Desc(BinaryPackageBuild._new_date_finished)]
+ orderBy = [Desc(BinaryPackageBuild.date_finished)]
# All orders fallback to id if the primary order doesn't succeed
orderBy.append(BinaryPackageBuild.id)
@@ -1095,7 +1084,7 @@
elif status == BuildStatus.SUPERSEDED or status is None:
order_by = [Desc(BinaryPackageBuild.id)]
else:
- order_by = [Desc(BinaryPackageBuild._new_date_finished),
+ order_by = [Desc(BinaryPackageBuild.date_finished),
BinaryPackageBuild.id]
# End of duplication (see XXX cprov 2006-09-25 above).
@@ -1154,7 +1143,7 @@
BinaryPackageBuild,
SQL(query))
resultset.order_by(
- Desc(BinaryPackageBuild._new_date_created), BinaryPackageBuild.id)
+ Desc(BinaryPackageBuild.date_created), BinaryPackageBuild.id)
return resultset
def getStatusSummaryForBuilds(self, builds):
@@ -1237,11 +1226,10 @@
SourcePackageName.id
== SourcePackageRelease.sourcepackagenameID),
LeftJoin(LibraryFileAlias,
- LibraryFileAlias.id == BinaryPackageBuild._new_log_id),
+ LibraryFileAlias.id == BinaryPackageBuild.log_id),
LeftJoin(LibraryFileContent,
LibraryFileContent.id == LibraryFileAlias.contentID),
- LeftJoin(
- Builder, Builder.id == BinaryPackageBuild._new_builder_id),
+ LeftJoin(Builder, Builder.id == BinaryPackageBuild.builder_id),
)
result_set = store.using(*origin).find(
(SourcePackageRelease, LibraryFileAlias, SourcePackageName,
=== modified file 'lib/lp/soyuz/model/distributionsourcepackagerelease.py'
--- lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-02-04 07:27:24 +0000
@@ -109,8 +109,7 @@
# include new and failed builds.)
builds_built_in_main_archives = Store.of(self.distribution).find(
BinaryPackageBuild,
- builds_for_distro_exprs,
- BinaryPackageBuild._new_archive == Archive.id,
+ builds_for_distro_exprs, BinaryPackageBuild.archive == Archive.id,
Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES))
# Next get all the builds that have a binary published in the
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/model/publishing.py 2013-02-04 07:27:24 +0000
@@ -43,8 +43,6 @@
from lp.app.errors import NotFoundError
from lp.buildmaster.enums import BuildStatus
-from lp.buildmaster.model.buildfarmjob import BuildFarmJob
-from lp.buildmaster.model.packagebuild import PackageBuild
from lp.registry.interfaces.person import validate_public_person
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.services.database import bulk
@@ -1579,8 +1577,7 @@
# If an optional list of build states was passed in as a parameter,
# ensure that the result is limited to builds in those states.
if build_states is not None:
- extra_exprs.append(
- BinaryPackageBuild._new_status.is_in(build_states))
+ extra_exprs.append(BinaryPackageBuild.status.is_in(build_states))
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
@@ -1600,7 +1597,7 @@
BinaryPackageBuild,
builds_for_distroseries_expr,
(SourcePackagePublishingHistory.archiveID ==
- BinaryPackageBuild._new_archive_id),
+ BinaryPackageBuild.archive_id),
*extra_exprs)
# Next get all the builds that have a binary published in the
@@ -1610,7 +1607,7 @@
BinaryPackageBuild,
builds_for_distroseries_expr,
(SourcePackagePublishingHistory.archiveID !=
- BinaryPackageBuild._new_archive_id),
+ BinaryPackageBuild.archive_id),
BinaryPackagePublishingHistory.archive ==
SourcePackagePublishingHistory.archiveID,
BinaryPackagePublishingHistory.binarypackagerelease ==
@@ -1728,7 +1725,7 @@
self._getSourceBinaryJoinForSources(
source_publication_ids, active_binaries_only=False),
BinaryPackagePublishingHistory.datepublished != None,
- BinaryPackageBuild._new_status.is_in(build_states))
+ BinaryPackageBuild.status.is_in(build_states))
published_builds.order_by(
SourcePackagePublishingHistory.id,
=== modified file 'lib/lp/soyuz/model/queue.py'
--- lib/lp/soyuz/model/queue.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/model/queue.py 2013-02-04 07:27:24 +0000
@@ -48,7 +48,6 @@
from lp.archivepublisher.customupload import CustomUploadError
from lp.archivepublisher.debversion import Version
from lp.archiveuploader.tagfiles import parse_tagfile_content
-from lp.buildmaster.model.packagebuild import PackageBuild
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.registry.model.sourcepackagename import SourcePackageName
from lp.services.auditor.client import AuditorClient
=== modified file 'lib/lp/soyuz/scripts/retrydepwait.py'
--- lib/lp/soyuz/scripts/retrydepwait.py 2013-02-04 07:27:23 +0000
+++ lib/lp/soyuz/scripts/retrydepwait.py 2013-02-04 07:27:24 +0000
@@ -43,7 +43,7 @@
return self.store.find(
BinaryPackageBuild,
BinaryPackageBuild.id >= self.start_at,
- BinaryPackageBuild._new_status == BuildStatus.MANUALDEPWAIT,
+ BinaryPackageBuild.status == BuildStatus.MANUALDEPWAIT,
).order_by(BinaryPackageBuild.id)
def isDone(self):
=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py 2013-02-01 02:00:24 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2013-02-04 07:27:24 +0000
@@ -10,7 +10,6 @@
import pytz
from storm.store import Store
-from testtools.matchers import MatchesStructure
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
@@ -63,25 +62,6 @@
self.assertProvides(self.build, IPackageBuild)
self.assertProvides(self.build, IBinaryPackageBuild)
- def test_denormed_attributes(self):
- primary_build = self.factory.makeBinaryPackageBuild(
- archive=self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY))
- partner_build = self.factory.makeBinaryPackageBuild(
- archive=self.factory.makeArchive(purpose=ArchivePurpose.PARTNER))
- ppa_build = self.factory.makeBinaryPackageBuild(
- archive=self.factory.makeArchive(purpose=ArchivePurpose.PPA))
- scenarios = [
- (primary_build, True), (partner_build, True), (ppa_build, False)]
- for build, is_distro_archive in scenarios:
- self.assertThat(
- removeSecurityProxy(build),
- MatchesStructure.byEquality(
- _new_is_distro_archive=is_distro_archive,
- _new_distro_series=build.distro_arch_series.distroseries,
- _new_distribution=build.distro_series.distribution,
- _new_source_package_name=
- build.source_package_release.sourcepackagename))
-
def test_queueBuild(self):
# BinaryPackageBuild can create the queue entry for itself.
bq = self.build.queueBuild()
=== modified file 'lib/lp/translations/model/translationtemplatesbuild.py'
--- lib/lp/translations/model/translationtemplatesbuild.py 2013-02-04 07:27:23 +0000
+++ lib/lp/translations/model/translationtemplatesbuild.py 2013-02-04 07:27:24 +0000
@@ -65,30 +65,26 @@
branch_id = Int(name='branch', allow_none=False)
branch = Reference(branch_id, 'Branch.id')
- # Migrating from BuildFarmJob.
- _new_processor_id = Int(name='processor')
- _new_processor = Reference(_new_processor_id, 'Processor.id')
-
- _new_virtualized = Bool(name='virtualized')
-
- _new_date_created = DateTime(name='date_created', tzinfo=pytz.UTC)
-
- _new_date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
-
- _new_date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
-
- _new_date_first_dispatched = DateTime(
+ processor_id = Int(name='processor')
+ processor = Reference(processor_id, 'Processor.id')
+ virtualized = Bool(name='virtualized')
+
+ date_created = DateTime(
+ name='date_created', tzinfo=pytz.UTC, allow_none=False)
+ date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
+ date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
+ date_first_dispatched = DateTime(
name='date_first_dispatched', tzinfo=pytz.UTC)
- _new_builder_id = Int(name='builder')
- _new_builder = Reference(_new_builder_id, 'Builder.id')
-
- _new_status = DBEnum(name='status', enum=BuildStatus)
-
- _new_log_id = Int(name='log')
- _new_log = Reference(_new_log_id, 'LibraryFileAlias.id')
-
- _new_failure_count = Int(name='failure_count')
+ builder_id = Int(name='builder')
+ builder = Reference(builder_id, 'Builder.id')
+
+ status = DBEnum(name='status', enum=BuildStatus, allow_none=False)
+
+ log_id = Int(name='log')
+ log = Reference(log_id, 'LibraryFileAlias.id')
+
+ failure_count = Int(name='failure_count', allow_none=False)
@property
def title(self):
@@ -99,8 +95,8 @@
super(TranslationTemplatesBuild, self).__init__()
self.build_farm_job = build_farm_job
self.branch = branch
- self._new_status = BuildStatus.NEEDSBUILD
- self._new_processor = processor
+ self.status = BuildStatus.NEEDSBUILD
+ self.processor = processor
def makeJob(self):
"""See `IBuildFarmJobOld`."""
@@ -184,7 +180,7 @@
# Preload branches cached associated product series and
# suite source packages for all the related branches.
GenericBranchCollection.preloadDataForBranches(branches)
- load_related(LibraryFileAlias, builds, ['_new_log_id'])
+ load_related(LibraryFileAlias, builds, ['log_id'])
@classmethod
def findByBranch(cls, branch, store=None):