launchpad-reviewers team mailing list archive

[Merge] lp:~julian-edwards/launchpad/cp-slow-deathrow into lp:launchpad/devel

 

Julian Edwards has proposed merging lp:~julian-edwards/launchpad/cp-slow-deathrow into lp:launchpad/devel.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  #656803 Process-death-row taking a Very Long Time with PG8.4
  https://bugs.launchpad.net/bugs/656803


After PG8.4 was put into production, the process-death-row script started
failing.  It turned out to be a bad query that 8.3 let us get away with; this
branch fixes the query by removing an unnecessary table join.
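
For reviewers, a simplified sketch of the query shape being fixed (the WHERE
clauses are abbreviated, so this is not the exact production statement): the
subquery's FROM clause listed sourcepackagerelease without ever joining or
referencing it, so the NOT EXISTS degenerated into a cross join, which 8.4
evidently plans far worse than 8.3 did.  Dropping the unreferenced table
leaves only the intended correlated anti-join; the binary-package query in
deathrow.py gets the same treatment.

    -- Simplified illustration only: table and column names are taken from
    -- the deathrow.py hunk below, inner conditions abbreviated.

    -- Slow shape: the unjoined sourcepackagerelease table turns the
    -- subquery into a cross join over every release row.
    SELECT SourcePackagePublishingHistory.id
    FROM SourcePackagePublishingHistory
    WHERE
        SourcePackagePublishingHistory.dateremoved IS NULL AND
        NOT EXISTS (
            SELECT 1 FROM sourcepackagepublishinghistory AS spph,
                sourcepackagerelease AS spr
            WHERE
                SourcePackagePublishingHistory.sourcepackagerelease =
                    spph.sourcepackagerelease AND
                spph.dateremoved IS NULL);

    -- Fixed shape: only the table the correlation actually uses remains.
    SELECT SourcePackagePublishingHistory.id
    FROM SourcePackagePublishingHistory
    WHERE
        SourcePackagePublishingHistory.dateremoved IS NULL AND
        NOT EXISTS (
            SELECT 1 FROM sourcepackagepublishinghistory AS spph
            WHERE
                SourcePackagePublishingHistory.sourcepackagerelease =
                    spph.sourcepackagerelease AND
                spph.dateremoved IS NULL);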
-- 
https://code.launchpad.net/~julian-edwards/launchpad/cp-slow-deathrow/+merge/37943
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~julian-edwards/launchpad/cp-slow-deathrow into lp:launchpad/devel.
=== modified file 'database/schema/comments.sql'
--- database/schema/comments.sql	2010-09-03 16:43:11 +0000
+++ database/schema/comments.sql	2010-10-08 10:21:20 +0000
@@ -1171,6 +1171,7 @@
 COMMENT ON COLUMN DistroArchSeries.package_count IS 'A cache of the number of binary packages published in this distro arch release. The count only includes packages published in the release pocket.';
 COMMENT ON COLUMN DistroArchSeries.supports_virtualized IS 'Whether or not
 virtualized build support should be provided by this specific distroarchseries';
+COMMENT ON COLUMN DistroArchSeries.enabled IS 'Whether to allow build creation and publishing for this DistroArchSeries.';
 
 -- LauncpadDatabaseRevision
 COMMENT ON TABLE LaunchpadDatabaseRevision IS 'This table contains a list of the database patches that have been successfully applied to this database.';

=== added file 'database/schema/patch-2208-08-1.sql'
--- database/schema/patch-2208-08-1.sql	1970-01-01 00:00:00 +0000
+++ database/schema/patch-2208-08-1.sql	2010-10-08 10:21:20 +0000
@@ -0,0 +1,9 @@
+-- Copyright 2010 Canonical Ltd.  This software is licensed under the
+-- GNU Affero General Public License version 3 (see the file LICENSE).
+
+SET client_min_messages=ERROR;
+
+ALTER TABLE distroarchseries
+    ADD COLUMN enabled bool NOT NULL DEFAULT TRUE;
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 08, 1);

=== modified file 'lib/canonical/launchpad/browser/launchpad.py'
--- lib/canonical/launchpad/browser/launchpad.py	2010-09-29 03:49:03 +0000
+++ lib/canonical/launchpad/browser/launchpad.py	2010-10-08 10:21:20 +0000
@@ -549,12 +549,16 @@
                     "The target %s does not have a linked branch." % path)
 
         except (CannotHaveLinkedBranch, InvalidNamespace,
+<<<<<<< TREE
                 InvalidProductName, NotFoundError), e:
             # If are aren't arriving at this invalid branch URL from another
             # page then we just raise an exception, otherwise we end up in a
             # bad recursion loop. The target url will be None in that case.
             if target_url is None:
                 raise e
+=======
+                InvalidProductName, NotFoundError), e:
+>>>>>>> MERGE-SOURCE
             error_msg = str(e)
             if error_msg == '':
                 error_msg = "Invalid branch lp:%s." % path

=== modified file 'lib/canonical/launchpad/browser/tests/test_launchpad.py'
--- lib/canonical/launchpad/browser/tests/test_launchpad.py	2010-09-23 05:11:29 +0000
+++ lib/canonical/launchpad/browser/tests/test_launchpad.py	2010-10-08 10:21:20 +0000
@@ -3,6 +3,8 @@
 
 """Tests for traversal from the root branch object."""
 
+from __future__ import with_statement
+
 __metaclass__ = type
 
 import unittest

=== modified file 'lib/devscripts/ec2test/tests/test_remote.py'
--- lib/devscripts/ec2test/tests/test_remote.py	2010-10-07 09:57:03 +0000
+++ lib/devscripts/ec2test/tests/test_remote.py	2010-10-08 10:21:20 +0000
@@ -995,11 +995,24 @@
         result = self.make_failing_result()
         logger.got_result(result)
         [user_message] = log
-        error_result_string = request.format_result(
-                result, logger._start_time, logger._end_time)
+<<<<<<< TREE
+        error_result_string = request.format_result(
+                result, logger._start_time, logger._end_time)
+=======
+        error_result_string = request.format_result(
+                result, logger._start_time, logger._end_time)
+        # Stringify the utf8-encoded MIME text message part containing the
+        # test run summary.
+        summary_text = user_message.get_payload(0).get_payload(decode=True)
+>>>>>>> MERGE-SOURCE
         self.assertEqual(
+<<<<<<< TREE
             error_result_string,
             get_body_text(user_message))
+=======
+            error_result_string,
+            summary_text)
+>>>>>>> MERGE-SOURCE
 
     def test_gzip_of_full_log_attached(self):
         # The full log is attached to the email.

=== modified file 'lib/lp/archivepublisher/deathrow.py'
--- lib/lp/archivepublisher/deathrow.py	2010-08-23 16:51:11 +0000
+++ lib/lp/archivepublisher/deathrow.py	2010-10-08 10:21:20 +0000
@@ -120,8 +120,7 @@
             SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
             SourcePackagePublishingHistory.dateremoved IS NULL AND
             NOT EXISTS (
-              SELECT 1 FROM sourcepackagepublishinghistory as spph,
-                  sourcepackagerelease as spr
+              SELECT 1 FROM sourcepackagepublishinghistory as spph
               WHERE
                   SourcePackagePublishingHistory.sourcepackagerelease =
                       spph.sourcepackagerelease AND
@@ -136,8 +135,7 @@
             BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
             BinaryPackagePublishingHistory.dateremoved IS NULL AND
             NOT EXISTS (
-              SELECT 1 FROM binarypackagepublishinghistory as bpph,
-                  binarypackagerelease as bpr
+              SELECT 1 FROM binarypackagepublishinghistory as bpph
               WHERE
                   BinaryPackagePublishingHistory.binarypackagerelease =
                       bpph.binarypackagerelease AND

=== modified file 'lib/lp/buildmaster/tests/test_builder.py'
=== modified file 'lib/lp/services/scripts/base.py'
--- lib/lp/services/scripts/base.py	2010-10-03 15:30:06 +0000
+++ lib/lp/services/scripts/base.py	2010-10-08 10:21:20 +0000
@@ -381,6 +381,8 @@
 def cronscript_enabled(control_url, name, log):
     """Return True if the cronscript is enabled."""
     try:
+        # XXX bug=654639 StuartBishop: Remove the 2.5 compatibility
+        # code when we are running 2.6 everywhere.
         if sys.version_info[:2] >= (2, 6):
             # Timeout of 5 seconds should be fine on the LAN. We don't want
             # the default as it is too long for scripts being run every 60
@@ -395,6 +397,14 @@
             return True
         log.exception("Error loading %s" % control_url)
         return True
+    # XXX bug=654639 StuartBishop: Python 2.5 can return OSError. Under
+    # 2.6, this is wrapped into a URLError.
+    except OSError, error:
+        if error.errno == 2:
+            log.debug("Cronscript control file not found at %s", control_url)
+            return True
+        log.exception("Error loading %s" % control_url)
+        return True
     except URLError, error:
         if getattr(error.reason, 'errno', None) == 2:
             log.debug("Cronscript control file not found at %s", control_url)

=== modified file 'lib/lp/soyuz/browser/distroarchseries.py'
--- lib/lp/soyuz/browser/distroarchseries.py	2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/browser/distroarchseries.py	2010-10-08 10:21:20 +0000
@@ -124,7 +124,8 @@
     schema = IDistroArchSeries
 
     field_names = [
-        'architecturetag', 'official', 'supports_virtualized'
+        'architecturetag', 'official', 'supports_virtualized',
+        'enabled',
         ]
 
     @action(_('Change'), name='update')

=== added file 'lib/lp/soyuz/browser/tests/test_distroarchseries_view.py'
--- lib/lp/soyuz/browser/tests/test_distroarchseries_view.py	1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/browser/tests/test_distroarchseries_view.py	2010-10-08 10:21:20 +0000
@@ -0,0 +1,51 @@
+# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__metaclass__ = type
+
+from canonical.launchpad.ftests import login
+from canonical.launchpad.webapp.servers import LaunchpadTestRequest
+from canonical.testing import LaunchpadFunctionalLayer
+from lp.soyuz.browser.distroarchseries import DistroArchSeriesAdminView
+from lp.testing import TestCaseWithFactory
+from lp.testing.sampledata import LAUNCHPAD_ADMIN
+
+
+class TestDistroArchSeriesView(TestCaseWithFactory):
+
+    layer = LaunchpadFunctionalLayer
+
+    def setUp(self):
+        """Create a distroarchseries for the tests and login as an admin."""
+        super(TestDistroArchSeriesView, self).setUp()
+        self.das = self.factory.makeDistroArchSeries()
+        # Login as an admin to ensure access to the view's context
+        # object.
+        login(LAUNCHPAD_ADMIN)
+
+    def initialize_admin_view(self, enabled=True):
+        # Initialize the admin view with the supplied params.
+        method = 'POST'
+        form = {
+            'field.actions.update': 'update',
+            }
+
+        if enabled:
+            form['field.enabled'] = 'on'
+        else:
+            form['field.enabled'] = 'off'
+
+        view = DistroArchSeriesAdminView(
+            self.das, LaunchpadTestRequest(method=method, form=form))
+        view.initialize()
+        return view
+
+    def test_enabling_enabled_flag(self):
+        view = self.initialize_admin_view(enabled=False)
+        self.assertEqual(0, len(view.errors))
+        self.assertFalse(view.context.enabled)
+
+    def test_disabling_enabled_flag(self):
+        view = self.initialize_admin_view(enabled=True)
+        self.assertEqual(0, len(view.errors))
+        self.assertTrue(view.context.enabled)

=== modified file 'lib/lp/soyuz/doc/distroarchseries.txt'
--- lib/lp/soyuz/doc/distroarchseries.txt	2010-10-03 15:30:06 +0000
+++ lib/lp/soyuz/doc/distroarchseries.txt	2010-10-08 10:21:20 +0000
@@ -25,6 +25,16 @@
 #      This needs many more tests to be effective.
 
 
+Properties
+==========
+
+Enabled is a boolean flag that says whether the arch will receive new builds
+and publish them.
+
+    >>> print hoary_i386.enabled
+    True
+
+
 DistroArchSeries can tell you about their published releases
 ============================================================
 

=== modified file 'lib/lp/soyuz/doc/package-arch-specific.txt'
--- lib/lp/soyuz/doc/package-arch-specific.txt	2010-10-03 15:30:06 +0000
+++ lib/lp/soyuz/doc/package-arch-specific.txt	2010-10-08 10:21:20 +0000
@@ -121,6 +121,17 @@
     >>> print_build_architectures(pub_one)
     hppa
 
+If an architecture is disabled for some reason, then the results from
+determineArchitecturesToBuild() will not include it.
+
+    >>> hoary['hppa'].enabled = False
+
+    >>> print_build_architectures(pub_three)
+    i386
+
+Re-enable it before continuing:
+    >>> hoary['hppa'].enabled = True
+
 
 == Check support for kernel notation in architecture hint list ==
 

=== modified file 'lib/lp/soyuz/interfaces/distroarchseries.py'
--- lib/lp/soyuz/interfaces/distroarchseries.py	2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/interfaces/distroarchseries.py	2010-10-08 10:21:20 +0000
@@ -83,6 +83,12 @@
             description=_("Indicate whether or not this port has support "
                           "for building PPA packages."),
             required=False))
+    enabled = Bool(
+        title=_("Enabled"),
+        description=_(
+            "Whether or not this DistroArchSeries is enabled for build "
+            "creation and publication."),
+        required=False, readonly=False)
 
     # Joins.
     packages = Attribute('List of binary packages in this port.')

=== modified file 'lib/lp/soyuz/model/distroarchseries.py'
--- lib/lp/soyuz/model/distroarchseries.py	2010-10-03 15:30:06 +0000
+++ lib/lp/soyuz/model/distroarchseries.py	2010-10-08 10:21:20 +0000
@@ -82,6 +82,7 @@
         storm_validator=validate_public_person, notNull=True)
     package_count = IntCol(notNull=True, default=DEFAULT)
     supports_virtualized = BoolCol(notNull=False, default=False)
+    enabled = BoolCol(notNull=False, default=True)
 
     packages = SQLRelatedJoin('BinaryPackageRelease',
         joinColumn='distroarchseries',

=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py	2010-10-06 08:21:40 +0000
+++ lib/lp/soyuz/model/publishing.py	2010-10-08 10:21:20 +0000
@@ -1246,6 +1246,10 @@
             binarypackagerelease = binary.binarypackagerelease
             target_component = override_component or binary.component
 
+            # XXX 2010-09-28 Julian bug=649859
+            # This piece of code duplicates the logic in
+            # PackageUploadBuild.publish(), it needs to be refactored.
+
             if binarypackagerelease.architecturespecific:
                 # If the binary is architecture specific and the target
                 # distroseries does not include the architecture then we
@@ -1262,7 +1266,9 @@
                     continue
                 destination_architectures = [target_architecture]
             else:
-                destination_architectures = distroseries.architectures
+                destination_architectures = [
+                    arch for arch in distroseries.architectures
+                    if arch.enabled]
 
             for distroarchseries in destination_architectures:
 

=== modified file 'lib/lp/soyuz/model/queue.py'
--- lib/lp/soyuz/model/queue.py	2010-09-03 15:02:39 +0000
+++ lib/lp/soyuz/model/queue.py	2010-10-08 10:21:20 +0000
@@ -1442,20 +1442,30 @@
         build_archtag = self.build.distro_arch_series.architecturetag
         # Determine the target arch series.
         # This will raise NotFoundError if anything odd happens.
-        target_dar = self.packageupload.distroseries[build_archtag]
+        target_das = self.packageupload.distroseries[build_archtag]
         debug(logger, "Publishing build to %s/%s/%s" % (
-            target_dar.distroseries.distribution.name,
-            target_dar.distroseries.name,
+            target_das.distroseries.distribution.name,
+            target_das.distroseries.name,
             build_archtag))
-        # And get the other distroarchseriess
-        other_dars = set(self.packageupload.distroseries.architectures)
-        other_dars = other_dars - set([target_dar])
+
+        # Get the other enabled distroarchseries for this
+        # distroseries.  If the binary is architecture independent then
+        # we need to publish it in all of those too.
+
+        # XXX Julian 2010-09-28 bug=649859
+        # This logic is duplicated in
+        # PackagePublishingSet.copyBinariesTo() and should be
+        # refactored.
+        other_das = set(
+            arch for arch in self.packageupload.distroseries.architectures
+            if arch.enabled)
+        other_das = other_das - set([target_das])
         # First up, publish everything in this build into that dar.
         published_binaries = []
         for binary in self.build.binarypackages:
-            target_dars = set([target_dar])
+            target_dars = set([target_das])
             if not binary.architecturespecific:
-                target_dars = target_dars.union(other_dars)
+                target_dars = target_dars.union(other_das)
                 debug(logger, "... %s/%s (Arch Independent)" % (
                     binary.binarypackagename.name,
                     binary.version))

=== modified file 'lib/lp/soyuz/pas.py'
--- lib/lp/soyuz/pas.py	2010-08-23 16:51:11 +0000
+++ lib/lp/soyuz/pas.py	2010-10-08 10:21:20 +0000
@@ -164,7 +164,8 @@
         if not legal_archseries:
             return []
 
-    legal_arch_tags = set(arch.architecturetag for arch in legal_archseries)
+    legal_arch_tags = set(
+        arch.architecturetag for arch in legal_archseries if arch.enabled)
 
     # We need to support arch tags like any-foo and linux-foo, so remove
     # supported kernel prefixes, Also allow linux-any but not any-any.

=== modified file 'lib/lp/soyuz/scripts/initialise_distroseries.py'
--- lib/lp/soyuz/scripts/initialise_distroseries.py	2010-10-01 17:48:37 +0000
+++ lib/lp/soyuz/scripts/initialise_distroseries.py	2010-10-08 10:21:20 +0000
@@ -134,7 +134,8 @@
             INSERT INTO DistroArchSeries
             (distroseries, processorfamily, architecturetag, owner, official)
             SELECT %s, processorfamily, architecturetag, %s, official
-            FROM DistroArchSeries WHERE distroseries = %s %s
+            FROM DistroArchSeries WHERE distroseries = %s
+            AND enabled = TRUE %s
             """ % (sqlvalues(self.distroseries, self.distroseries.owner,
             self.parent) + (include,)))
 

=== modified file 'lib/lp/soyuz/scripts/tests/test_copypackage.py'
--- lib/lp/soyuz/scripts/tests/test_copypackage.py	2010-10-04 20:46:55 +0000
+++ lib/lp/soyuz/scripts/tests/test_copypackage.py	2010-10-08 10:21:20 +0000
@@ -977,6 +977,44 @@
              ],
             [copy.displayname for copy in copies])
 
+    def test_copying_arch_indep_binaries_with_disabled_arches(self):
+        # When copying an arch-indep binary to a new series, we must not
+        # copy it into architectures that are disabled.
+
+        # Make a new arch-all source and binary in breezy-autotest:
+        archive = self.factory.makeArchive(
+            distribution=self.test_publisher.ubuntutest, virtualized=False)
+        source = self.test_publisher.getPubSource(
+            archive=archive, architecturehintlist='all')
+        [bin_i386, bin_hppa] = self.test_publisher.getPubBinaries(
+            pub_source=source)
+
+        # Now make a new distroseries with two architectures, one of
+        # which is disabled.
+        nobby = self.factory.makeDistroSeries(
+            distribution=self.test_publisher.ubuntutest, name='nobby')
+        i386_pf = self.factory.makeProcessorFamily(name='my_i386')
+        nobby_i386 = self.factory.makeDistroArchSeries(
+            distroseries=nobby, architecturetag='i386',
+            processorfamily=i386_pf)
+        hppa_pf = self.factory.makeProcessorFamily(name='my_hppa')
+        nobby_hppa = self.factory.makeDistroArchSeries(
+            distroseries=nobby, architecturetag='hppa',
+            processorfamily=hppa_pf)
+        nobby_hppa.enabled = False
+        nobby.nominatedarchindep = nobby_i386
+        self.test_publisher.addFakeChroots(nobby)
+
+        # Now we can copy the package with binaries.
+        copies = _do_direct_copy(
+            source, source.archive, nobby, source.pocket, True)
+
+        # The binary should not be published for hppa.
+        self.assertEquals(
+            [u'foo 666 in nobby',
+             u'foo-bin 666 in nobby i386',],
+            [copy.displayname for copy in copies])
+
 
 class DoDelayedCopyTestCase(TestCaseWithFactory):
 

=== modified file 'lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py'
--- lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py	2010-09-30 10:07:26 +0000
+++ lib/lp/soyuz/scripts/tests/test_initialise_distroseries.py	2010-10-08 10:21:20 +0000
@@ -242,6 +242,27 @@
         self.assertEqual(child.binarycount, 0)
         self.assertEqual(builds.count(), self.parent.sourcecount)
 
+    def test_do_not_copy_disabled_dases(self):
+        # DASes that are disabled in the parent will not be copied
+        i386 = self.factory.makeProcessorFamily()
+        ppc = self.factory.makeProcessorFamily()
+        parent = self.factory.makeDistroSeries()
+        i386_das = self.factory.makeDistroArchSeries(
+            distroseries=parent, processorfamily=i386)
+        ppc_das = self.factory.makeDistroArchSeries(
+            distroseries=parent, processorfamily=ppc)
+        ppc_das.enabled = False
+        parent.nominatedarchindep = i386_das
+        foobuntu = self._create_distroseries(parent)
+        ids = InitialiseDistroSeries(foobuntu)
+        ids.check()
+        ids.initialise()
+        das = list(IStore(DistroArchSeries).find(
+            DistroArchSeries, distroseries = foobuntu))
+        self.assertEqual(len(das), 1)
+        self.assertEqual(
+            das[0].architecturetag, i386_das.architecturetag)
+
     def test_script(self):
         # Do an end-to-end test using the command-line tool
         uploader = self.factory.makePerson()

=== modified file 'lib/lp/soyuz/tests/test_publishing.py'
=== modified file 'lib/lp/soyuz/tests/test_publishing_top_level_api.py'
--- lib/lp/soyuz/tests/test_publishing_top_level_api.py	2010-08-24 15:29:01 +0000
+++ lib/lp/soyuz/tests/test_publishing_top_level_api.py	2010-10-08 10:21:20 +0000
@@ -5,7 +5,10 @@
 
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.series import SeriesStatus
-from lp.soyuz.enums import PackagePublishingStatus
+from lp.soyuz.enums import (
+    PackagePublishingStatus,
+    PackageUploadStatus,
+    )
 from lp.soyuz.tests.test_publishing import TestNativePublishingBase
 
 
@@ -419,3 +422,39 @@
         self.checkBinaryLookupForPocket(
             PackagePublishingPocket.RELEASE, is_careful=True,
             expected_result=[pub_published_release, pub_pending_release])
+
+    def test_publishing_disabled_distroarchseries(self):
+        # Disabled DASes will not receive new publications at all.
+
+        # Make an arch-all source and some builds for it.
+        archive = self.factory.makeArchive(
+            distribution=self.ubuntutest, virtualized=False)
+        source = self.getPubSource(
+            archive=archive, architecturehintlist='all')
+        [build_i386] = source.createMissingBuilds()
+        bin_i386 = self.uploadBinaryForBuild(build_i386, 'bin-i386')
+
+        # Now make sure they have a packageupload (but no publishing
+        # records).
+        changes_file_name = '%s_%s_%s.changes' % (
+            bin_i386.name, bin_i386.version, build_i386.arch_tag)
+        pu_i386 = self.addPackageUpload(
+            build_i386.archive, build_i386.distro_arch_series.distroseries,
+            build_i386.pocket, changes_file_content='anything',
+            changes_file_name=changes_file_name,
+            upload_status=PackageUploadStatus.ACCEPTED)
+        pu_i386.addBuild(build_i386)
+
+        # Now we make hppa a disabled architecture, and then call the
+        # publish method on the packageupload.  The arch-all binary
+        # should be published only in the i386 arch, not the hppa one.
+        hppa = pu_i386.distroseries.getDistroArchSeries('hppa')
+        hppa.enabled = False
+        for pu_build in pu_i386.builds:
+            pu_build.publish()
+
+        publications = archive.getAllPublishedBinaries(name="bin-i386")
+
+        self.assertEqual(1, publications.count())
+        self.assertEqual(
+            'i386', publications[0].distroarchseries.architecturetag)

=== modified file 'versions.cfg'
--- versions.cfg	2010-10-01 01:55:57 +0000
+++ versions.cfg	2010-10-08 10:21:20 +0000
@@ -32,7 +32,7 @@
 lazr.delegates = 1.2.0
 lazr.enum = 1.1.2
 lazr.lifecycle = 1.1
-lazr.restful = 0.13.0
+lazr.restful = 0.13.1
 lazr.restfulclient = 0.10.0
 lazr.smtptest = 1.1
 lazr.testing = 0.1.1