← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~cjwatson/launchpad/delete-unmodifiable-suite into lp:launchpad

 

Colin Watson has proposed merging lp:~cjwatson/launchpad/delete-unmodifiable-suite into lp:launchpad.

Commit message:
Refuse to delete packages from unmodifiable suites.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  Bug #1152669 in Launchpad itself: "Soyuz allows me to delete packages from a RELEASE pocket of a CURRENT series"
  https://bugs.launchpad.net/launchpad/+bug/1152669

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/delete-unmodifiable-suite/+merge/152529

== Summary ==

Bug 1152669: it's possible to delete packages from a release pocket in a stable distroseries, requiring (as far as I can tell) database surgery to recover.  This shouldn't be permitted, since the deletion will never be published anyway.

== Proposed fix ==

Add Archive.canModifySuite checks to the relevant methods.

== Implementation details ==

I may have gone a little overboard to achieve lines-of-code (LoC) neutrality.  You can always just look at the principal effective revision (http://bazaar.launchpad.net/~cjwatson/launchpad/delete-unmodifiable-suite/revision/16526).

== Tests ==

bin/test -vvct lp.soyuz.tests.test_publishing

== Demo and Q/A ==

Try to delete a package from the quantal RELEASE pocket on qastaging (using the requestDeletion API method).  This should fail with a DeletionError (surfaced over the API as HTTP 400 Bad Request).
-- 
https://code.launchpad.net/~cjwatson/launchpad/delete-unmodifiable-suite/+merge/152529
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/delete-unmodifiable-suite into lp:launchpad.
=== modified file 'lib/lp/soyuz/interfaces/publishing.py'
--- lib/lp/soyuz/interfaces/publishing.py	2013-02-28 09:41:40 +0000
+++ lib/lp/soyuz/interfaces/publishing.py	2013-03-08 22:44:24 +0000
@@ -6,6 +6,7 @@
 __metaclass__ = type
 
 __all__ = [
+    'DeletionError',
     'IArchiveSafePublisher',
     'IBinaryPackageFilePublishing',
     'IBinaryPackagePublishingHistory',
@@ -106,6 +107,11 @@
     """Raised when an attempt to change an override fails."""
 
 
+@error_status(httplib.BAD_REQUEST)
+class DeletionError(Exception):
+    """Raised when an attempt to delete a publication fails."""
+
+
 name_priority_map = {
     'required': PackagePublishingPriority.REQUIRED,
     'important': PackagePublishingPriority.IMPORTANT,
@@ -284,19 +290,16 @@
             title=_('Component name'), required=True, readonly=True,
             )
     publishingstatus = Int(
-            title=_('Package publishing status'), required=True,
-            readonly=True,
+            title=_('Package publishing status'), required=True, readonly=True,
             )
     pocket = Int(
-            title=_('Package publishing pocket'), required=True,
-            readonly=True,
+            title=_('Package publishing pocket'), required=True, readonly=True,
             )
     archive = Int(
             title=_('Archive ID'), required=True, readonly=True,
             )
     libraryfilealias = Int(
-            title=_('Binarypackage file alias'), required=True,
-            readonly=True,
+            title=_('Binarypackage file alias'), required=True, readonly=True,
             )
     libraryfilealiasfilename = TextLine(
             title=_('File name'), required=True, readonly=True,

=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py	2013-02-28 09:41:40 +0000
+++ lib/lp/soyuz/model/publishing.py	2013-03-08 22:44:24 +0000
@@ -91,6 +91,7 @@
     )
 from lp.soyuz.interfaces.publishing import (
     active_publishing_status,
+    DeletionError,
     IBinaryPackageFilePublishing,
     IBinaryPackagePublishingHistory,
     IPublishingSet,
@@ -122,8 +123,7 @@
     # XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py
     """Return the pool path for a given source name and component name."""
     from lp.archivepublisher.diskpool import poolify
-    return os.path.join(
-        'pool', poolify(source_name, component_name))
+    return os.path.join('pool', poolify(source_name, component_name))
 
 
 def get_component(archive, distroseries, component):
@@ -160,8 +160,7 @@
 
 def proxied_urls(files, parent):
     """Run the files passed through `ProxiedLibraryFileAlias`."""
-    return [
-        ProxiedLibraryFileAlias(file, parent).http_url for file in files]
+    return [ProxiedLibraryFileAlias(file, parent).http_url for file in files]
 
 
 class FilePublishingBase:
@@ -178,15 +177,13 @@
         sha1 = filealias.content.sha1
         path = diskpool.pathFor(component, source, filename)
 
-        action = diskpool.addFile(
-            component, source, filename, sha1, filealias)
+        action = diskpool.addFile(component, source, filename, sha1, filealias)
         if action == diskpool.results.FILE_ADDED:
             log.debug("Added %s from library" % path)
         elif action == diskpool.results.SYMLINK_ADDED:
             log.debug("%s created as a symlink." % path)
         elif action == diskpool.results.NONE:
-            log.debug(
-                "%s is already in pool with the same content." % path)
+            log.debug("%s is already in pool with the same content." % path)
 
     @property
     def archive_url(self):
@@ -435,8 +432,7 @@
         foreignKey='SourcePackageName', dbName='sourcepackagename')
     sourcepackagerelease = ForeignKey(
         foreignKey='SourcePackageRelease', dbName='sourcepackagerelease')
-    distroseries = ForeignKey(
-        foreignKey='DistroSeries', dbName='distroseries')
+    distroseries = ForeignKey(foreignKey='DistroSeries', dbName='distroseries')
     component = ForeignKey(foreignKey='Component', dbName='component')
     section = ForeignKey(foreignKey='Section', dbName='section')
     status = EnumCol(schema=PackagePublishingStatus)
@@ -732,8 +728,7 @@
         """See `IPublishing`."""
         release = self.sourcepackagerelease
         name = release.sourcepackagename.name
-        return "%s %s in %s" % (name, release.version,
-                                self.distroseries.name)
+        return "%s %s in %s" % (name, release.version, self.distroseries.name)
 
     def buildIndexStanzaFields(self):
         """See `IPublishing`."""
@@ -925,6 +920,11 @@
 
     def requestDeletion(self, removed_by, removal_comment=None):
         """See `IPublishing`."""
+        if not self.archive.canModifySuite(self.distroseries, self.pocket):
+            raise DeletionError(
+                "Cannot delete publications from suite '%s'" %
+                self.distroseries.getSuite(self.pocket))
+
         self.setDeleted(removed_by, removal_comment)
         if self.archive.is_main:
             dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource)
@@ -1053,8 +1053,7 @@
         #  ...
         #  <DESCRIPTION LN>
         descr_lines = [line.lstrip() for line in bpr.description.splitlines()]
-        bin_description = (
-            '%s\n %s' % (bpr.summary, '\n '.join(descr_lines)))
+        bin_description = '%s\n %s' % (bpr.summary, '\n '.join(descr_lines))
 
         # Dealing with architecturespecific field.
         # Present 'all' in every archive index for architecture
@@ -1329,11 +1328,9 @@
             ]
 
         if start_date is not None:
-            clauses.append(
-                BinaryPackageReleaseDownloadCount.day >= start_date)
+            clauses.append(BinaryPackageReleaseDownloadCount.day >= start_date)
         if end_date is not None:
-            clauses.append(
-                BinaryPackageReleaseDownloadCount.day <= end_date)
+            clauses.append(BinaryPackageReleaseDownloadCount.day <= end_date)
 
         return clauses
 
@@ -1373,6 +1370,11 @@
 
     def requestDeletion(self, removed_by, removal_comment=None):
         """See `IPublishing`."""
+        if not self.archive.canModifySuite(self.distroseries, self.pocket):
+            raise DeletionError(
+                "Cannot delete publications from suite '%s'" %
+                self.distroseries.getSuite(self.pocket))
+
         self.setDeleted(removed_by, removal_comment)
 
     def binaryFileUrls(self, include_meta=False):
@@ -1415,8 +1417,7 @@
             # Find the DAS in this series corresponding to the original
             # build arch tag. If it does not exist or is disabled, we should
             # not publish.
-            target_arch = arch_map.get(
-                bpr.build.distro_arch_series.architecturetag)
+            target_arch = arch_map.get(bpr.build.arch_tag)
             target_archs = [target_arch] if target_arch is not None else []
         else:
             target_archs = archs
@@ -1941,8 +1942,7 @@
             # separate query for now.
             source_pubs.update(store.find(
                 SourcePackagePublishingHistory,
-                SourcePackagePublishingHistory.id.is_in(
-                    pubs_without_builds),
+                SourcePackagePublishingHistory.id.is_in(pubs_without_builds),
                 SourcePackagePublishingHistory.archive == archive))
         # For each source_pub found, provide an aggregate summary of its
         # builds.
@@ -2043,6 +2043,14 @@
             return
         assert len(sources) + len(binaries) == len(pubs)
 
+        locations = set(
+            (pub.archive, pub.distroseries, pub.pocket) for pub in pubs)
+        for archive, distroseries, pocket in locations:
+            if not archive.canModifySuite(distroseries, pocket):
+                raise DeletionError(
+                    "Cannot delete publications from suite '%s'" %
+                    distroseries.getSuite(pocket))
+
         spph_ids = [spph.id for spph in sources]
         self.setMultipleDeleted(
             SourcePackagePublishingHistory, spph_ids, removed_by,

=== modified file 'lib/lp/soyuz/tests/test_publishing.py'
--- lib/lp/soyuz/tests/test_publishing.py	2013-02-20 12:28:38 +0000
+++ lib/lp/soyuz/tests/test_publishing.py	2013-03-08 22:44:24 +0000
@@ -42,6 +42,7 @@
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.publishing import (
+    DeletionError,
     IPublishingSet,
     OverrideError,
     PackagePublishingPriority,
@@ -504,11 +505,8 @@
             if new_version is None:
                 new_version = version
             changesfile_content = ''
-            handle = open(changesfile_path, 'r')
-            try:
+            with open(changesfile_path, 'r') as handle:
                 changesfile_content = handle.read()
-            finally:
-                handle.close()
 
             source = self.getPubSource(
                 sourcename=sourcename, archive=archive, version=new_version,
@@ -648,9 +646,9 @@
                     dominant = supersededby.binarypackagerelease.build
                 else:
                     dominant = supersededby.sourcepackagerelease
-                self.assertEquals(dominant, pub.supersededby)
+                self.assertEqual(dominant, pub.supersededby)
             else:
-                self.assertIs(None, pub.supersededby)
+                self.assertIsNone(pub.supersededby)
 
 
 class TestNativePublishing(TestNativePublishingBase):
@@ -660,9 +658,7 @@
         # the corresponding files are dumped in the disk pool/.
         pub_source = self.getPubSource(filecontent='Hello world')
         pub_source.publish(self.disk_pool, self.logger)
-        self.assertEqual(
-            PackagePublishingStatus.PUBLISHED,
-            pub_source.status)
+        self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_source.status)
         pool_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir
         self.assertEqual(open(pool_path).read().strip(), 'Hello world')
 
@@ -671,9 +667,7 @@
         # the corresponding files are dumped in the disk pool/.
         pub_binary = self.getPubBinaries(filecontent='Hello world')[0]
         pub_binary.publish(self.disk_pool, self.logger)
-        self.assertEqual(
-            PackagePublishingStatus.PUBLISHED,
-            pub_binary.status)
+        self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_binary.status)
         pool_path = "%s/main/f/foo/foo-bin_666_all.deb" % self.pool_dir
         self.assertEqual(open(pool_path).read().strip(), 'Hello world')
 
@@ -688,9 +682,8 @@
         foo_path = os.path.join(self.pool_dir, 'main', 'f', 'foo')
         os.makedirs(foo_path)
         foo_dsc_path = os.path.join(foo_path, 'foo_666.dsc')
-        foo_dsc = open(foo_dsc_path, 'w')
-        foo_dsc.write('Hello world')
-        foo_dsc.close()
+        with open(foo_dsc_path, 'w') as foo_dsc:
+            foo_dsc.write('Hello world')
 
         pub_source = self.getPubSource(filecontent="Something")
         pub_source.publish(self.disk_pool, self.logger)
@@ -699,8 +692,7 @@
         self.assertEqual("PoolFileOverwriteError", self.oopses[0]['type'])
 
         self.layer.commit()
-        self.assertEqual(
-            pub_source.status, PackagePublishingStatus.PENDING)
+        self.assertEqual(pub_source.status, PackagePublishingStatus.PENDING)
         self.assertEqual(open(foo_dsc_path).read().strip(), 'Hello world')
 
     def testPublishingDifferentContents(self):
@@ -711,8 +703,7 @@
 
         foo_name = "%s/main/f/foo/foo_666.dsc" % self.pool_dir
         pub_source.sync()
-        self.assertEqual(
-            pub_source.status, PackagePublishingStatus.PUBLISHED)
+        self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED)
         self.assertEqual(open(foo_name).read().strip(), 'foo is happy')
 
         # try to publish 'foo' again with a different content, it
@@ -723,8 +714,7 @@
         self.layer.commit()
 
         pub_source2.sync()
-        self.assertEqual(
-            pub_source2.status, PackagePublishingStatus.PENDING)
+        self.assertEqual(pub_source2.status, PackagePublishingStatus.PENDING)
         self.assertEqual(open(foo_name).read().strip(), 'foo is happy')
 
     def testPublishingAlreadyInPool(self):
@@ -740,16 +730,14 @@
         bar_name = "%s/main/b/bar/bar_666.dsc" % self.pool_dir
         self.assertEqual(open(bar_name).read().strip(), 'bar is good')
         pub_source.sync()
-        self.assertEqual(
-            pub_source.status, PackagePublishingStatus.PUBLISHED)
+        self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED)
 
         pub_source2 = self.getPubSource(
             sourcename='bar', filecontent='bar is good')
         pub_source2.publish(self.disk_pool, self.logger)
         self.layer.commit()
         pub_source2.sync()
-        self.assertEqual(
-            pub_source2.status, PackagePublishingStatus.PUBLISHED)
+        self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED)
 
     def testPublishingSymlink(self):
         """Test if publishOne moving publication between components.
@@ -759,8 +747,7 @@
         """
         content = 'am I a file or a symbolic link ?'
         # publish sim.dsc in main and re-publish in universe
-        pub_source = self.getPubSource(
-            sourcename='sim', filecontent=content)
+        pub_source = self.getPubSource(sourcename='sim', filecontent=content)
         pub_source2 = self.getPubSource(
             sourcename='sim', component='universe', filecontent=content)
         pub_source.publish(self.disk_pool, self.logger)
@@ -769,10 +756,8 @@
 
         pub_source.sync()
         pub_source2.sync()
-        self.assertEqual(
-            pub_source.status, PackagePublishingStatus.PUBLISHED)
-        self.assertEqual(
-            pub_source2.status, PackagePublishingStatus.PUBLISHED)
+        self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED)
+        self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED)
 
         # check the resulted symbolic link
         sim_universe = "%s/universe/s/sim/sim_666.dsc" % self.pool_dir
@@ -788,8 +773,7 @@
         self.layer.commit()
 
         pub_source3.sync()
-        self.assertEqual(
-            pub_source3.status, PackagePublishingStatus.PENDING)
+        self.assertEqual(pub_source3.status, PackagePublishingStatus.PENDING)
 
     def testPublishInAnotherArchive(self):
         """Publication in another archive
@@ -882,10 +866,10 @@
             copies = (copied, )
 
         for copy in copies:
-            self.assertEquals(copy.component, pub_record.component)
+            self.assertEqual(pub_record.component, copy.component)
             copy.overrideFromAncestry()
             self.layer.commit()
-            self.assertEquals(copy.component.name, 'universe')
+            self.assertEqual("universe", copy.component.name)
 
     def test_overrideFromAncestry_fallback_to_source_component(self):
         # overrideFromAncestry on the lack of ancestry, falls back to the
@@ -966,7 +950,7 @@
         spph = self.factory.makeSourcePackagePublishingHistory(
             sourcepackagerelease=spr, archive=ppa)
         spph.overrideFromAncestry()
-        self.assertEquals(spph.component.name, 'main')
+        self.assertEqual("main", spph.component.name)
 
     def test_ppa_override_with_ancestry(self):
         # Test a PPA publication with ancestry
@@ -978,7 +962,7 @@
         spph2 = self.factory.makeSourcePackagePublishingHistory(
             sourcepackagerelease=spr, archive=ppa)
         spph2.overrideFromAncestry()
-        self.assertEquals(spph2.component.name, 'main')
+        self.assertEqual("main", spph2.component.name)
 
     def test_copyTo_with_overrides(self):
         # Specifying overrides with copyTo should result in the new
@@ -1009,8 +993,7 @@
         # SPPH's ancestor get's populated when a spph is copied over.
         target_archive = self.factory.makeArchive()
         spph = self.factory.makeSourcePackagePublishingHistory()
-        copy = spph.copyTo(
-            spph.distroseries, spph.pocket, target_archive)
+        copy = spph.copyTo(spph.distroseries, spph.pocket, target_archive)
 
         self.assertEqual(spph, copy.ancestor)
 
@@ -1059,7 +1042,8 @@
         """
         available_archs = [self.sparc_distroarch, self.avr_distroarch]
         pubrec = self.getPubSource(architecturehintlist='any')
-        self.assertEquals([self.sparc_distroarch],
+        self.assertEqual(
+            [self.sparc_distroarch],
             pubrec._getAllowedArchitectures(available_archs))
 
     def test__getAllowedArchitectures_restricted_override(self):
@@ -1071,7 +1055,8 @@
         available_archs = [self.sparc_distroarch, self.avr_distroarch]
         getUtility(IArchiveArchSet).new(self.archive, self.avr_family)
         pubrec = self.getPubSource(architecturehintlist='any')
-        self.assertEquals([self.sparc_distroarch, self.avr_distroarch],
+        self.assertEqual(
+            [self.sparc_distroarch, self.avr_distroarch],
             pubrec._getAllowedArchitectures(available_archs))
 
     def test_createMissingBuilds_restricts_any(self):
@@ -1080,8 +1065,8 @@
         """
         pubrec = self.getPubSource(architecturehintlist='any')
         builds = pubrec.createMissingBuilds()
-        self.assertEquals(1, len(builds))
-        self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series)
+        self.assertEqual(1, len(builds))
+        self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series)
 
     def test_createMissingBuilds_restricts_explicitlist(self):
         """createMissingBuilds() limits builds targeted at a variety of
@@ -1089,8 +1074,8 @@
         """
         pubrec = self.getPubSource(architecturehintlist='sparc i386 avr')
         builds = pubrec.createMissingBuilds()
-        self.assertEquals(1, len(builds))
-        self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series)
+        self.assertEqual(1, len(builds))
+        self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series)
 
     def test_createMissingBuilds_restricts_all(self):
         """createMissingBuilds() should limit builds targeted at 'all'
@@ -1099,8 +1084,8 @@
         """
         pubrec = self.getPubSource(architecturehintlist='all')
         builds = pubrec.createMissingBuilds()
-        self.assertEquals(1, len(builds))
-        self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series)
+        self.assertEqual(1, len(builds))
+        self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series)
 
     def test_createMissingBuilds_restrict_override(self):
         """createMissingBuilds() should limit builds targeted at 'any'
@@ -1110,9 +1095,9 @@
         getUtility(IArchiveArchSet).new(self.archive, self.avr_family)
         pubrec = self.getPubSource(architecturehintlist='any')
         builds = pubrec.createMissingBuilds()
-        self.assertEquals(2, len(builds))
-        self.assertEquals(self.avr_distroarch, builds[0].distro_arch_series)
-        self.assertEquals(self.sparc_distroarch, builds[1].distro_arch_series)
+        self.assertEqual(2, len(builds))
+        self.assertEqual(self.avr_distroarch, builds[0].distro_arch_series)
+        self.assertEqual(self.sparc_distroarch, builds[1].distro_arch_series)
 
 
 class PublishingSetTests(TestCaseWithFactory):
@@ -1175,59 +1160,69 @@
 
     layer = ZopelessDatabaseLayer
 
+    def setUp(self):
+        super(TestPublishingSetLite, self).setUp()
+        self.person = self.factory.makePerson()
+
     def test_requestDeletion_marks_SPPHs_deleted(self):
         spph = self.factory.makeSourcePackagePublishingHistory()
-        getUtility(IPublishingSet).requestDeletion(
-            [spph], self.factory.makePerson())
+        getUtility(IPublishingSet).requestDeletion([spph], self.person)
         self.assertEqual(PackagePublishingStatus.DELETED, spph.status)
 
     def test_requestDeletion_leaves_other_SPPHs_alone(self):
         spph = self.factory.makeSourcePackagePublishingHistory()
         other_spph = self.factory.makeSourcePackagePublishingHistory()
-        getUtility(IPublishingSet).requestDeletion(
-            [other_spph], self.factory.makePerson())
+        getUtility(IPublishingSet).requestDeletion([other_spph], self.person)
         self.assertEqual(PackagePublishingStatus.PENDING, spph.status)
 
     def test_requestDeletion_marks_BPPHs_deleted(self):
         bpph = self.factory.makeBinaryPackagePublishingHistory()
-        getUtility(IPublishingSet).requestDeletion(
-            [bpph], self.factory.makePerson())
+        getUtility(IPublishingSet).requestDeletion([bpph], self.person)
         self.assertEqual(PackagePublishingStatus.DELETED, bpph.status)
 
     def test_requestDeletion_marks_attached_BPPHs_deleted(self):
         bpph = self.factory.makeBinaryPackagePublishingHistory()
         spph = self.factory.makeSPPHForBPPH(bpph)
-        getUtility(IPublishingSet).requestDeletion(
-            [spph], self.factory.makePerson())
+        getUtility(IPublishingSet).requestDeletion([spph], self.person)
         self.assertEqual(PackagePublishingStatus.DELETED, spph.status)
 
     def test_requestDeletion_leaves_other_BPPHs_alone(self):
         bpph = self.factory.makeBinaryPackagePublishingHistory()
         unrelated_spph = self.factory.makeSourcePackagePublishingHistory()
         getUtility(IPublishingSet).requestDeletion(
-            [unrelated_spph], self.factory.makePerson())
+            [unrelated_spph], self.person)
         self.assertEqual(PackagePublishingStatus.PENDING, bpph.status)
 
     def test_requestDeletion_accepts_empty_sources_list(self):
-        person = self.factory.makePerson()
-        getUtility(IPublishingSet).requestDeletion([], person)
+        getUtility(IPublishingSet).requestDeletion([], self.person)
         # The test is that this does not fail.
-        Store.of(person).flush()
+        Store.of(self.person).flush()
 
     def test_requestDeletion_creates_DistroSeriesDifferenceJobs(self):
         dsp = self.factory.makeDistroSeriesParent()
-        series = dsp.derived_series
         spph = self.factory.makeSourcePackagePublishingHistory(
-            series, pocket=PackagePublishingPocket.RELEASE)
+            dsp.derived_series, pocket=PackagePublishingPocket.RELEASE)
         spn = spph.sourcepackagerelease.sourcepackagename
-
-        getUtility(IPublishingSet).requestDeletion(
-            [spph], self.factory.makePerson())
-
+        getUtility(IPublishingSet).requestDeletion([spph], self.person)
         self.assertEqual(
             1, len(find_waiting_jobs(
                 dsp.derived_series, spn, dsp.parent_series)))
 
+    def test_requestDeletion_disallows_unmodifiable_suites(self):
+        bpph = self.factory.makeBinaryPackagePublishingHistory(
+            pocket=PackagePublishingPocket.RELEASE)
+        spph = self.factory.makeSourcePackagePublishingHistory(
+            distroseries=bpph.distroseries,
+            pocket=PackagePublishingPocket.RELEASE)
+        spph.distroseries.status = SeriesStatus.CURRENT
+        message = "Cannot delete publications from suite '%s'" % (
+            spph.distroseries.getSuite(spph.pocket))
+        for pub in spph, bpph:
+            self.assertRaisesWithContent(
+                DeletionError, message, pub.requestDeletion, self.person)
+            self.assertRaisesWithContent(
+                DeletionError, message, pub.api_requestDeletion, self.person)
+
 
 class TestSourceDomination(TestNativePublishingBase):
     """Test SourcePackagePublishingHistory.supersede() operates correctly."""
@@ -1264,7 +1259,7 @@
 
         self.assertRaises(AssertionError, source.supersede, super_source)
         self.checkSuperseded([source], super_source)
-        self.assertEquals(super_date, source.datesuperseded)
+        self.assertEqual(super_date, source.datesuperseded)
 
 
 class TestBinaryDomination(TestNativePublishingBase):
@@ -1343,7 +1338,7 @@
 
         self.assertRaises(AssertionError, bin.supersede, super_bin)
         self.checkSuperseded([bin], super_bin)
-        self.assertEquals(super_date, bin.datesuperseded)
+        self.assertEqual(super_date, bin.datesuperseded)
 
     def testSkipsSupersededArchIndependentBinary(self):
         """Check that supersede() skips a superseded arch-indep binary.
@@ -1365,7 +1360,7 @@
 
         bin.supersede(super_bin)
         self.checkSuperseded([bin], super_bin)
-        self.assertEquals(super_date, bin.datesuperseded)
+        self.assertEqual(super_date, bin.datesuperseded)
 
     def testSupersedesCorrespondingDDEB(self):
         """Check that supersede() takes with it any corresponding DDEB.
@@ -1379,8 +1374,7 @@
 
         # Each of these will return (i386 deb, i386 ddeb, hppa deb,
         # hppa ddeb).
-        bins = self.getPubBinaries(
-            architecturespecific=True, with_debug=True)
+        bins = self.getPubBinaries(architecturespecific=True, with_debug=True)
         super_bins = self.getPubBinaries(
             architecturespecific=True, with_debug=True)
 
@@ -1405,8 +1399,7 @@
             distribution=self.ubuntutest)
 
         # This will return (i386 deb, i386 ddeb, hppa deb, hppa ddeb).
-        bins = self.getPubBinaries(
-            architecturespecific=True, with_debug=True)
+        bins = self.getPubBinaries(architecturespecific=True, with_debug=True)
         self.assertRaises(AssertionError, bins[0].supersede, bins[1])
 
 
@@ -1414,9 +1407,8 @@
     """Test BinaryPackagePublishingHistory._getOtherPublications() works."""
 
     def checkOtherPublications(self, this, others):
-        self.assertEquals(
-            set(removeSecurityProxy(this)._getOtherPublications()),
-            set(others))
+        self.assertContentEqual(
+            removeSecurityProxy(this)._getOtherPublications(), others)
 
     def testFindsOtherArchIndepPublications(self):
         """Arch-indep publications with the same overrides should be found."""
@@ -1529,7 +1521,7 @@
         # SPPH, BPPHs and BPRs.
         with StormStatementRecorder() as recorder:
             bins = spph.getBuiltBinaries()
-        self.assertEquals(0, len(bins))
+        self.assertEqual(0, len(bins))
         self.assertThat(recorder, HasQueryCount(Equals(3)))
 
         self.getPubBinaries(pub_source=spph)
@@ -1541,7 +1533,7 @@
         # BPF has no query penalty.
         with StormStatementRecorder() as recorder:
             bins = spph.getBuiltBinaries(want_files=True)
-            self.assertEquals(2, len(bins))
+            self.assertEqual(2, len(bins))
             for bpph in bins:
                 files = bpph.binarypackagerelease.files
                 self.assertEqual(1, len(files))
@@ -1595,8 +1587,7 @@
     def test_architecture_independent(self):
         # Architecture-independent binaries get published to all enabled
         # DASes in the series.
-        bpr = self.factory.makeBinaryPackageRelease(
-            architecturespecific=False)
+        bpr = self.factory.makeBinaryPackageRelease(architecturespecific=False)
         # Create 3 architectures. The binary will not be published in
         # the disabled one.
         target_das_a = self.factory.makeDistroArchSeries()
@@ -1606,12 +1597,11 @@
         self.factory.makeDistroArchSeries(
             distroseries=target_das_a.distroseries, enabled=False)
         args = self.makeArgs([bpr], target_das_a.distroseries)
-        bpphs = getUtility(IPublishingSet).publishBinaries(
-            **args)
-        self.assertEquals(2, len(bpphs))
-        self.assertEquals(
-            set((target_das_a, target_das_b)),
-            set(bpph.distroarchseries for bpph in bpphs))
+        bpphs = getUtility(IPublishingSet).publishBinaries(**args)
+        self.assertEqual(2, len(bpphs))
+        self.assertContentEqual(
+            (target_das_a, target_das_b),
+            [bpph.distroarchseries for bpph in bpphs])
 
     def test_architecture_disabled(self):
         # An empty list is return if the DistroArchSeries was disabled.
@@ -1664,13 +1654,12 @@
             build=build, binpackageformat=BinaryPackageFormat.DDEB)
         args = self.makeArgs([normal, debug], das.distroseries)
         bpphs = getUtility(IPublishingSet).publishBinaries(**args)
-        self.assertEquals(2, len(bpphs))
-        self.assertEquals(
-            set((normal, debug)),
-            set(bpph.binarypackagerelease for bpph in bpphs))
-        self.assertEquals(
-            set((das.main_archive, das.main_archive.debug_archive)),
-            set(bpph.archive for bpph in bpphs))
+        self.assertEqual(2, len(bpphs))
+        self.assertContentEqual(
+            (normal, debug), [bpph.binarypackagerelease for bpph in bpphs])
+        self.assertContentEqual(
+            (das.main_archive, das.main_archive.debug_archive),
+            [bpph.archive for bpph in bpphs])
 
         # A second copy does nothing, because it checks in the debug
         # archive too.

=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py	2013-01-23 06:54:49 +0000
+++ lib/lp/testing/factory.py	2013-03-08 22:44:24 +0000
@@ -2,7 +2,7 @@
 # NOTE: The first line above must stay first; do not move the copyright
 # notice to the top.  See http://www.python.org/dev/peps/pep-0263/.
 #
-# Copyright 2009-2012 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2013 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """Testing infrastructure for the Launchpad application.
@@ -3678,7 +3678,7 @@
             initial source package release  upload archive, or to the
             distro series main archive.
         :param pocket: The pocket to publish into. Can be specified as a
-            string. Defaults to the RELEASE pocket.
+            string. Defaults to the BACKPORTS pocket.
         :param status: The publication status. Defaults to PENDING. If
             set to PUBLISHED, the publisheddate will be set to now.
         :param dateremoved: The removal date.