← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~wgrant/launchpad/flatten-bfj-2-garbo into lp:launchpad

 

William Grant has proposed merging lp:~wgrant/launchpad/flatten-bfj-2-garbo into lp:launchpad with lp:~wgrant/launchpad/flatten-bfj-1-populate as a prerequisite.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  Bug #758258 in Launchpad itself: "buildfarmjob schema is inefficient for reporting"
  https://bugs.launchpad.net/launchpad/+bug/758258

For more details, see:
https://code.launchpad.net/~wgrant/launchpad/flatten-bfj-2-garbo/+merge/145542

The build farm job schema is being reworked to improve performance. Columns from PackageBuild and BuildFarmJob are being merged into tables that previously delegated to them. The PackageBuild table will end up dying entirely, but BuildFarmJob will remain, a shadow of its former self, to answer questions about Archive:+builds and Builder:+history. Additionally, BinaryPackageBuild is growing new distribution, distroseries, sourcepackagename, and is_distro_archive columns to make searches even faster.

This is the second app part: a garbo job to backfill the values that I started setting in the prereq.
-- 
https://code.launchpad.net/~wgrant/launchpad/flatten-bfj-2-garbo/+merge/145542
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/flatten-bfj-2-garbo into lp:launchpad.
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg	2013-02-01 02:19:27 +0000
+++ database/schema/security.cfg	2013-02-01 02:19:27 +0000
@@ -2218,6 +2218,7 @@
 public.answercontact                    = SELECT, DELETE
 public.branch                           = SELECT, UPDATE
 public.branchjob                        = SELECT, DELETE
+public.binarypackagebuild               = SELECT, UPDATE
 public.binarypackagename                = SELECT
 public.binarypackagerelease             = SELECT
 public.binarypackagepublishinghistory   = SELECT, UPDATE
@@ -2240,6 +2241,7 @@
 public.bugtaskflat                      = SELECT
 public.bugwatch                         = SELECT, UPDATE
 public.bugwatchactivity                 = SELECT, DELETE
+public.buildfarmjob                     = SELECT, UPDATE
 public.codeimportevent                  = SELECT, DELETE
 public.codeimporteventdata              = SELECT, DELETE
 public.codeimportresult                 = SELECT, DELETE
@@ -2263,6 +2265,7 @@
 public.revisionauthor                   = SELECT, UPDATE
 public.revisioncache                    = SELECT, DELETE
 public.sourcepackagename                = SELECT
+public.sourcepackagerecipebuild         = SELECT, UPDATE
 public.sourcepackagerelease             = SELECT
 public.sourcepackagepublishinghistory   = SELECT, UPDATE
 public.suggestivepotemplate             = INSERT, DELETE
@@ -2270,6 +2273,7 @@
 public.teamparticipation                = SELECT, DELETE
 public.translationmessage               = SELECT, DELETE
 public.translationtemplateitem          = SELECT, DELETE
+public.translationtemplatesbuild        = SELECT, UPDATE
 type=user
 
 [garbo_daily]

=== modified file 'lib/lp/scripts/garbo.py'
--- lib/lp/scripts/garbo.py	2013-01-17 00:25:48 +0000
+++ lib/lp/scripts/garbo.py	2013-02-01 02:19:27 +0000
@@ -57,6 +57,8 @@
     BugWatchScheduler,
     MAX_SAMPLE_SIZE,
     )
+from lp.buildmaster.model.buildfarmjob import BuildFarmJob
+from lp.buildmaster.model.packagebuild import PackageBuild
 from lp.code.interfaces.revision import IRevisionSet
 from lp.code.model.codeimportevent import CodeImportEvent
 from lp.code.model.codeimportresult import CodeImportResult
@@ -64,8 +66,10 @@
     RevisionAuthor,
     RevisionCache,
     )
+from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild
 from lp.hardwaredb.model.hwdb import HWSubmission
 from lp.registry.model.commercialsubscription import CommercialSubscription
+from lp.registry.model.distroseries import DistroSeries
 from lp.registry.model.person import Person
 from lp.registry.model.product import Product
 from lp.registry.model.teammembership import TeamMembership
@@ -104,6 +108,7 @@
 from lp.services.librarian.model import TimeLimitedToken
 from lp.services.log.logger import PrefixFilter
 from lp.services.looptuner import TunableLoop
+from lp.services.memcache.interfaces import IMemcacheClient
 from lp.services.oauth.model import OAuthNonce
 from lp.services.openid.model.openidconsumer import OpenIDConsumerNonce
 from lp.services.propertycache import cachedproperty
@@ -118,7 +123,10 @@
     )
 from lp.services.session.model import SessionData
 from lp.services.verification.model.logintoken import LoginToken
+from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES
 from lp.soyuz.model.archive import Archive
+from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
+from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
 from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
@@ -129,6 +137,9 @@
 from lp.translations.model.translationtemplateitem import (
     TranslationTemplateItem,
     )
+from lp.translations.model.translationtemplatesbuild import (
+    TranslationTemplatesBuild,
+    )
 from lp.translations.scripts.scrub_pofiletranslator import (
     ScrubPOFileTranslator,
     )
@@ -1335,6 +1346,219 @@
         transaction.commit()
 
 
+class BinaryPackageBuildFlattener(TunableLoop):
+    """Populates the new denormalised columns on BinaryPackageBuild."""
+
+    maximum_chunk_size = 5000
+
+    def __init__(self, log, abort_time=None):
+        super(BinaryPackageBuildFlattener, self).__init__(log, abort_time)
+
+        self.memcache_key = '%s:bpb-flattener' % config.instance_name
+        watermark = getUtility(IMemcacheClient).get(self.memcache_key)
+        self.start_at = watermark or 0
+        self.store = IMasterStore(BinaryPackageBuild)
+
+    def findIDs(self):
+        return self.store.find(
+            BinaryPackageBuild.id,
+            BinaryPackageBuild.id >= self.start_at,
+            ).order_by(BinaryPackageBuild.id)
+
+    def isDone(self):
+        return (
+            not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
+            or self.findIDs().is_empty())
+
+    def __call__(self, chunk_size):
+        """See `ITunableLoop`."""
+        ids = list(self.findIDs()[:chunk_size])
+        updated_columns = {
+            BinaryPackageBuild._new_archive_id: PackageBuild.archive_id,
+            BinaryPackageBuild._new_pocket: PackageBuild.pocket,
+            BinaryPackageBuild._new_processor_id: BuildFarmJob.processor_id,
+            BinaryPackageBuild._new_virtualized: BuildFarmJob.virtualized,
+            BinaryPackageBuild._new_date_created: BuildFarmJob.date_created,
+            BinaryPackageBuild._new_date_started: BuildFarmJob.date_started,
+            BinaryPackageBuild._new_date_finished: BuildFarmJob.date_finished,
+            BinaryPackageBuild._new_date_first_dispatched:
+                BuildFarmJob.date_first_dispatched,
+            BinaryPackageBuild._new_builder_id: BuildFarmJob.builder_id,
+            BinaryPackageBuild._new_status: BuildFarmJob.status,
+            BinaryPackageBuild._new_log_id: BuildFarmJob.log_id,
+            BinaryPackageBuild._new_upload_log_id: PackageBuild.upload_log_id,
+            BinaryPackageBuild._new_dependencies: PackageBuild.dependencies,
+            BinaryPackageBuild._new_failure_count: BuildFarmJob.failure_count,
+            BinaryPackageBuild._new_build_farm_job_id: BuildFarmJob.id,
+            BinaryPackageBuild._new_distribution_id:
+                DistroSeries.distributionID,
+            BinaryPackageBuild._new_distro_series_id: DistroSeries.id,
+            BinaryPackageBuild._new_source_package_name_id:
+                SourcePackageRelease.sourcepackagenameID,
+            BinaryPackageBuild._new_is_distro_archive:
+                Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES),
+            }
+        condition = And(
+            BinaryPackageBuild.id.is_in(ids),
+            PackageBuild.id == BinaryPackageBuild.package_build_id,
+            BuildFarmJob.id == PackageBuild.build_farm_job_id)
+        extra_condition = And(
+            condition,
+            SourcePackageRelease.id ==
+                BinaryPackageBuild.source_package_release_id,
+            Archive.id == PackageBuild.archive_id,
+            DistroArchSeries.id == BinaryPackageBuild.distro_arch_series_id,
+            DistroSeries.id == DistroArchSeries.distroseriesID)
+        self.store.execute(
+            BulkUpdate(
+                updated_columns, table=BinaryPackageBuild,
+                values=(
+                    PackageBuild, BuildFarmJob, Archive, DistroArchSeries,
+                    DistroSeries, SourcePackageRelease),
+                where=And(condition, extra_condition)))
+        self.store.execute(
+            BulkUpdate(
+                {BuildFarmJob.archive_id: PackageBuild.archive_id},
+                table=BuildFarmJob, values=(PackageBuild, BinaryPackageBuild),
+                where=condition))
+        transaction.commit()
+        self.start_at = ids[-1] + 1
+        getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
+
+
+class SourcePackageRecipeBuildFlattener(TunableLoop):
+    """Populates the new denormalised columns on SourcePackageRecipeBuild.
+
+    Copies the column values from the associated PackageBuild and
+    BuildFarmJob rows down onto SourcePackageRecipeBuild itself, and
+    backfills the new BuildFarmJob.archive column.  Progress is
+    checkpointed in memcache so a restarted run resumes from its last
+    position.
+    """
+
+    maximum_chunk_size = 5000
+
+    def __init__(self, log, abort_time=None):
+        super(SourcePackageRecipeBuildFlattener, self).__init__(
+            log, abort_time)
+
+        # Losing the watermark is harmless: rows are just rewritten
+        # with identical values on the next run.
+        self.memcache_key = '%s:sprb-flattener' % config.instance_name
+        watermark = getUtility(IMemcacheClient).get(self.memcache_key)
+        self.start_at = watermark or 0
+        self.store = IMasterStore(SourcePackageRecipeBuild)
+
+    def findIDs(self):
+        # IDs of builds at or past the watermark, ascending.
+        return self.store.find(
+            SourcePackageRecipeBuild.id,
+            SourcePackageRecipeBuild.id >= self.start_at,
+            ).order_by(SourcePackageRecipeBuild.id)
+
+    def isDone(self):
+        # Feature-flagged so the backfill can be switched off quickly
+        # if it misbehaves in production.
+        return (
+            not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
+            or self.findIDs().is_empty())
+
+    def __call__(self, chunk_size):
+        """See `ITunableLoop`."""
+        ids = list(self.findIDs()[:chunk_size])
+        # Map each new denormalised column to its source expression on
+        # PackageBuild or BuildFarmJob.
+        updated_columns = {
+            SourcePackageRecipeBuild._new_archive_id: PackageBuild.archive_id,
+            SourcePackageRecipeBuild._new_pocket: PackageBuild.pocket,
+            SourcePackageRecipeBuild._new_processor_id:
+                BuildFarmJob.processor_id,
+            SourcePackageRecipeBuild._new_virtualized:
+                BuildFarmJob.virtualized,
+            SourcePackageRecipeBuild._new_date_created:
+                BuildFarmJob.date_created,
+            SourcePackageRecipeBuild._new_date_started:
+                BuildFarmJob.date_started,
+            SourcePackageRecipeBuild._new_date_finished:
+                BuildFarmJob.date_finished,
+            SourcePackageRecipeBuild._new_date_first_dispatched:
+                BuildFarmJob.date_first_dispatched,
+            SourcePackageRecipeBuild._new_builder_id: BuildFarmJob.builder_id,
+            SourcePackageRecipeBuild._new_status: BuildFarmJob.status,
+            SourcePackageRecipeBuild._new_log_id: BuildFarmJob.log_id,
+            SourcePackageRecipeBuild._new_upload_log_id:
+                PackageBuild.upload_log_id,
+            SourcePackageRecipeBuild._new_dependencies:
+                PackageBuild.dependencies,
+            SourcePackageRecipeBuild._new_failure_count:
+                BuildFarmJob.failure_count,
+            SourcePackageRecipeBuild._new_build_farm_job_id: BuildFarmJob.id,
+            }
+        condition = And(
+            SourcePackageRecipeBuild.id.is_in(ids),
+            PackageBuild.id == SourcePackageRecipeBuild.package_build_id,
+            BuildFarmJob.id == PackageBuild.build_farm_job_id)
+        self.store.execute(
+            BulkUpdate(
+                updated_columns, table=SourcePackageRecipeBuild,
+                values=(PackageBuild, BuildFarmJob), where=condition))
+        # Backfill the new BuildFarmJob.archive column too.
+        self.store.execute(
+            BulkUpdate(
+                {BuildFarmJob.archive_id: PackageBuild.archive_id},
+                table=BuildFarmJob,
+                values=(PackageBuild, SourcePackageRecipeBuild),
+                where=condition))
+        transaction.commit()
+        # Checkpoint after the commit so a crash can't skip rows.
+        self.start_at = ids[-1] + 1
+        getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
+
+
+class TranslationTemplatesBuildFlattener(TunableLoop):
+    """Populates the new denormalised columns on TranslationTemplatesBuild."""
+
+    maximum_chunk_size = 5000
+
+    def __init__(self, log, abort_time=None):
+        super(TranslationTemplatesBuildFlattener, self).__init__(
+            log, abort_time)
+
+        self.memcache_key = '%s:ttb-flattener' % config.instance_name
+        watermark = getUtility(IMemcacheClient).get(self.memcache_key)
+        self.start_at = watermark or 0
+        self.store = IMasterStore(TranslationTemplatesBuild)
+
+    def findIDs(self):
+        return self.store.find(
+            TranslationTemplatesBuild.id,
+            TranslationTemplatesBuild.id >= self.start_at,
+            ).order_by(TranslationTemplatesBuild.id)
+
+    def isDone(self):
+        return (
+            not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
+            or self.findIDs().is_empty())
+
+    def __call__(self, chunk_size):
+        """See `ITunableLoop`."""
+        ids = list(self.findIDs()[:chunk_size])
+        updated_columns = {
+            TranslationTemplatesBuild._new_processor_id:
+                BuildFarmJob.processor_id,
+            TranslationTemplatesBuild._new_virtualized:
+                BuildFarmJob.virtualized,
+            TranslationTemplatesBuild._new_date_created:
+                BuildFarmJob.date_created,
+            TranslationTemplatesBuild._new_date_started:
+                BuildFarmJob.date_started,
+            TranslationTemplatesBuild._new_date_finished:
+                BuildFarmJob.date_finished,
+            TranslationTemplatesBuild._new_date_first_dispatched:
+                BuildFarmJob.date_first_dispatched,
+            TranslationTemplatesBuild._new_builder_id: BuildFarmJob.builder_id,
+            TranslationTemplatesBuild._new_status: BuildFarmJob.status,
+            TranslationTemplatesBuild._new_log_id: BuildFarmJob.log_id,
+            TranslationTemplatesBuild._new_failure_count:
+                BuildFarmJob.failure_count,
+            }
+        self.store.execute(
+            BulkUpdate(
+                updated_columns, table=TranslationTemplatesBuild,
+                values=(PackageBuild, BuildFarmJob),
+                where=And(
+                    TranslationTemplatesBuild.id.is_in(ids),
+                    BuildFarmJob.id ==
+                        TranslationTemplatesBuild.build_farm_job_id)))
+        transaction.commit()
+        self.start_at = ids[-1] + 1
+        getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
+
+
 class BaseDatabaseGarbageCollector(LaunchpadCronScript):
     """Abstract base class to run a collection of TunableLoops."""
     script_name = None  # Script name for locking and database user. Override.
@@ -1590,6 +1814,9 @@
         UnusedSessionPruner,
         DuplicateSessionPruner,
         BugHeatUpdater,
+        BinaryPackageBuildFlattener,
+        SourcePackageRecipeBuildFlattener,
+        TranslationTemplatesBuildFlattener,
         ]
     experimental_tunable_loops = []
 

=== modified file 'lib/lp/scripts/tests/test_garbo.py'
--- lib/lp/scripts/tests/test_garbo.py	2013-01-17 00:25:48 +0000
+++ lib/lp/scripts/tests/test_garbo.py	2013-02-01 02:19:27 +0000
@@ -31,6 +31,7 @@
 from testtools.matchers import (
     Equals,
     GreaterThan,
+    MatchesStructure,
     )
 import transaction
 from zope.component import getUtility
@@ -42,6 +43,7 @@
     BugNotification,
     BugNotificationRecipient,
     )
+from lp.buildmaster.enums import BuildStatus
 from lp.code.bzr import (
     BranchFormat,
     RepositoryFormat,
@@ -58,6 +60,7 @@
     BranchSharingPolicy,
     BugSharingPolicy,
     )
+from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild
 from lp.registry.interfaces.accesspolicy import IAccessPolicySource
 from lp.registry.interfaces.person import IPersonSet
 from lp.registry.interfaces.teammembership import TeamMembershipStatus
@@ -114,6 +117,7 @@
 from lp.services.verification.model.logintoken import LoginToken
 from lp.services.worlddata.interfaces.language import ILanguageSet
 from lp.soyuz.enums import PackagePublishingStatus
+from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache
 from lp.testing import (
     FakeAdapterMixin,
@@ -121,7 +125,10 @@
     TestCase,
     TestCaseWithFactory,
     )
-from lp.testing.dbuser import switch_dbuser
+from lp.testing.dbuser import (
+    dbuser,
+    switch_dbuser,
+    )
 from lp.testing.layers import (
     DatabaseLayer,
     LaunchpadScriptLayer,
@@ -133,6 +140,9 @@
 from lp.translations.model.translationtemplateitem import (
     TranslationTemplateItem,
     )
+from lp.translations.model.translationtemplatesbuild import (
+    TranslationTemplatesBuild,
+    )
 
 
 class TestGarboScript(TestCase):
@@ -1273,6 +1283,131 @@
             'PopulateLatestPersonSourcePackageReleaseCache')
         self.assertEqual(spph_2.id, job_data['last_spph_id'])
 
+    def test_BinaryPackageBuildFlattener(self):
+        """The garbo loop backfills BinaryPackageBuild's new columns."""
+        store = IMasterStore(BinaryPackageBuild)
+        # Sampledata builds start off with the new columns set to None,
+        # and garbo won't run without a feature flag set.
+        self.runHourly()
+        self.assertNotEqual(
+            0, store.find(BinaryPackageBuild, _new_archive=None).count())
+
+        # But after a garbo run they're all set properly.
+        with dbuser('testadmin'):
+            IMasterStore(FeatureFlag).add(FeatureFlag(
+                u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
+        self.runHourly()
+        self.assertEqual(
+            0, store.find(BinaryPackageBuild, _new_archive=None).count())
+
+        with dbuser('testadmin'):
+            # Create a build with lots of attributes set.
+            build = self.factory.makeBinaryPackageBuild()
+            build.gotFailure()
+            build.updateStatus(
+                BuildStatus.BUILDING, builder=self.factory.makeBuilder())
+            build.updateStatus(BuildStatus.FULLYBUILT)
+            build.setLog(self.factory.makeLibraryFileAlias())
+            build.storeUploadLog('uploaded')
+
+            # Manually unset the build's denormed columns.
+            attrs = (
+                'archive', 'pocket', 'processor', 'virtualized',
+                'date_created', 'date_started', 'date_finished',
+                'date_first_dispatched', 'builder', 'status', 'log',
+                'upload_log', 'dependencies', 'failure_count',
+                'build_farm_job', 'distribution', 'distro_series',
+                'source_package_name', 'is_distro_archive')
+            for attr in attrs:
+                setattr(removeSecurityProxy(build), '_new_' + attr, None)
+            removeSecurityProxy(build.build_farm_job).archive = None
+        self.assertEqual(
+            1, store.find(BinaryPackageBuild, _new_archive=None).count())
+        self.runHourly()
+        self.assertEqual(
+            0, store.find(BinaryPackageBuild, _new_archive=None).count())
+
+        # Every denormed column must match the delegated attribute, and
+        # BuildFarmJob.archive must have been backfilled too.
+        self.assertThat(
+            removeSecurityProxy(build),
+            MatchesStructure.byEquality(
+                **dict(
+                    ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
+        self.assertEqual(
+            build.archive, removeSecurityProxy(build.build_farm_job).archive)
+
+    def test_SourcePackageRecipeBuildFlattener(self):
+        store = IMasterStore(BinaryPackageBuild)
+        with dbuser('testadmin'):
+            IMasterStore(FeatureFlag).add(FeatureFlag(
+                u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
+
+        with dbuser('testadmin'):
+            # Create a build with lots of attributes set.
+            build = self.factory.makeSourcePackageRecipeBuild()
+            build.gotFailure()
+            build.updateStatus(
+                BuildStatus.BUILDING, builder=self.factory.makeBuilder())
+            build.updateStatus(BuildStatus.FULLYBUILT)
+            build.setLog(self.factory.makeLibraryFileAlias())
+            build.storeUploadLog('uploaded')
+
+            # Manually unset the build's denormed columns.
+            attrs = (
+                'archive', 'pocket', 'processor', 'virtualized',
+                'date_created', 'date_started', 'date_finished',
+                'date_first_dispatched', 'builder', 'status', 'log',
+                'upload_log', 'dependencies', 'failure_count',
+                'build_farm_job')
+            for attr in attrs:
+                setattr(removeSecurityProxy(build), '_new_' + attr, None)
+            removeSecurityProxy(build).build_farm_job.archive = None
+        self.assertEqual(
+            1, store.find(SourcePackageRecipeBuild, _new_archive=None).count())
+        self.runHourly()
+        self.assertEqual(
+            0, store.find(SourcePackageRecipeBuild, _new_archive=None).count())
+
+        self.assertThat(
+            removeSecurityProxy(build),
+            MatchesStructure.byEquality(
+                **dict(
+                    ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
+        self.assertEqual(
+            build.archive, removeSecurityProxy(build.build_farm_job).archive)
+
+    def test_TranslationTemplatesBuildFlattener(self):
+        store = IMasterStore(BinaryPackageBuild)
+        with dbuser('testadmin'):
+            IMasterStore(FeatureFlag).add(FeatureFlag(
+                u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
+
+        with dbuser('testadmin'):
+            # Create a build with lots of attributes set.
+            build = self.factory.makeTranslationTemplatesBuildJob().build
+            build.gotFailure()
+            build.updateStatus(
+                BuildStatus.BUILDING, builder=self.factory.makeBuilder())
+            build.updateStatus(BuildStatus.FULLYBUILT)
+            build.setLog(self.factory.makeLibraryFileAlias())
+
+            # Manually unset the build's denormed columns.
+            attrs = (
+                'processor', 'virtualized', 'date_created', 'date_started',
+                'date_finished', 'date_first_dispatched', 'builder', 'status',
+                'log', 'failure_count')
+            for attr in attrs:
+                setattr(removeSecurityProxy(build), '_new_' + attr, None)
+        self.assertEqual(
+            1, store.find(TranslationTemplatesBuild, _new_status=None).count())
+        self.runHourly()
+        self.assertEqual(
+            0, store.find(TranslationTemplatesBuild, _new_status=None).count())
+
+        self.assertThat(
+            removeSecurityProxy(build),
+            MatchesStructure.byEquality(
+                **dict(
+                    ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
+
 
 class TestGarboTasks(TestCaseWithFactory):
     layer = LaunchpadZopelessLayer

=== modified file 'lib/lp/soyuz/interfaces/binarypackagebuild.py'
--- lib/lp/soyuz/interfaces/binarypackagebuild.py	2013-01-22 08:31:09 +0000
+++ lib/lp/soyuz/interfaces/binarypackagebuild.py	2013-02-01 02:19:27 +0000
@@ -100,6 +100,9 @@
     distro_series = Attribute("Direct parent needed by CanonicalURL")
     arch_tag = exported(
         Text(title=_("Architecture tag"), required=False))
+    source_package_name = Attribute("Source package name")
+    is_distro_archive = Attribute(
+        "Whether the target archive belongs to the distro")
     distributionsourcepackagerelease = Attribute("The page showing the "
         "details for this sourcepackagerelease in this distribution.")
     binarypackages = Attribute(

=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py	2013-02-01 02:19:27 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py	2013-02-01 02:19:27 +0000
@@ -265,6 +265,16 @@
         return self.distro_series.distribution
 
     @property
+    def source_package_name(self):
+        """See `IBinaryPackageBuild`.
+
+        The SourcePackageName of the release this build compiles.
+        """
+        return self.source_package_release.sourcepackagename
+
+    @property
+    def is_distro_archive(self):
+        """See `IBinaryPackageBuild`.
+
+        Whether the target archive is a main (distribution) archive,
+        as reported by `Archive.is_main`.
+        """
+        return self.archive.is_main
+
+    @property
     def is_virtualized(self):
         """See `IBuild`"""
         return self.archive.require_virtualized