
launchpad-reviewers team mailing list archive

[Merge] lp:~jtv/launchpad/pre-824499 into lp:launchpad

 

Jeroen T. Vermeulen has proposed merging lp:~jtv/launchpad/pre-824499 into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  Bug #824499 in Launchpad itself: ""All derived distros" option for publish-ftpmaster"
  https://bugs.launchpad.net/launchpad/+bug/824499

For more details, see:
https://code.launchpad.net/~jtv/launchpad/pre-824499/+merge/71536

= Summary =

I'm adding an option to publish-ftpmaster that makes it loop over all Ubuntu-derived distributions, instead of processing just the one distribution named on the command line.

This branch prepares for that work: instead of having the script use self.distribution everywhere, it now passes the distribution around explicitly.
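
To show the shape of the change, here is the pattern as it applies to getArchives (condensed from the diff below; the comments are mine):

{{{
# Before: the method reads script-wide state.
def getArchives(self):
    return [
        archive for archive in self.distribution.all_distro_archives
        if archive.purpose in ARCHIVES_TO_PUBLISH]

# After: the distribution is an explicit parameter, so the same method
# can later be called once for each derived distribution.
def getArchives(self, distribution):
    return [
        archive for archive in distribution.all_distro_archives
        if archive.purpose in ARCHIVES_TO_PUBLISH]
}}}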


== Implementation details ==

As you probably guessed, self.distribution becomes self.distributions in the follow-up branch.
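
Roughly speaking, that lets the follow-up branch's main() do something like this (a hypothetical sketch only, not part of this diff; the actual option handling and ordering may differ):

{{{
# Hypothetical sketch of the follow-up branch, not part of this change.
def main(self):
    for distribution in self.distributions:
        self.processAccepted(distribution)
        self.rsyncBackupDists(distribution)
        self.publish(distribution, security_only=True)
        self.runFinalizeParts(distribution, security_only=True)
}}}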

Also fixed a bit of blatantly incorrect indentation, in a function I'm pretty sure I wrote myself, no less.


== Tests ==

{{{
./bin/test -vvc lp.archivepublisher.tests.test_publish_ftpmaster
}}}


== Demo and Q/A ==

No separate Q/A needed; that'll happen with the main branch.


= Launchpad lint =

Checking for conflicts and issues in changed files.

Linting changed files:
  lib/lp/archivepublisher/scripts/publish_ftpmaster.py
  lib/lp/archivepublisher/tests/test_publish_ftpmaster.py
-- 
https://code.launchpad.net/~jtv/launchpad/pre-824499/+merge/71536
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jtv/launchpad/pre-824499 into lp:launchpad.
=== modified file 'lib/lp/archivepublisher/scripts/publish_ftpmaster.py'
--- lib/lp/archivepublisher/scripts/publish_ftpmaster.py	2011-08-09 10:30:43 +0000
+++ lib/lp/archivepublisher/scripts/publish_ftpmaster.py	2011-08-15 10:13:28 +0000
@@ -210,14 +210,14 @@
                 self.logger.debug("Command failed: %s", failure)
                 raise failure
 
-    def getArchives(self):
+    def getArchives(self, distribution):
         """Find archives for `self.distribution` that should be published."""
         # XXX JeroenVermeulen 2011-03-31 bug=746229: to start publishing
         # debug archives, change this to return
-        # list(self.distribution.all_distro_archives).
+        # list(distribution.all_distro_archives).
         return [
             archive
-            for archive in self.distribution.all_distro_archives
+            for archive in distribution.all_distro_archives
                 if archive.purpose in ARCHIVES_TO_PUBLISH]
 
     def getConfigs(self):
@@ -230,7 +230,7 @@
             (archive.purpose, getPubConfig(archive))
             for archive in self.archives)
 
-    def locateIndexesMarker(self, suite):
+    def locateIndexesMarker(self, distribution, suite):
         """Give path for marker file whose presence marks index creation.
 
         The file will be created once the archive indexes for `suite`
@@ -265,50 +265,51 @@
             for pocket in pocketsuffix.iterkeys()]
         return [
             suite for suite in suites
-                if not file_exists(self.locateIndexesMarker(suite))]
+                if not file_exists(self.locateIndexesMarker(distro, suite))]
 
-    def markIndexCreationComplete(self, suite):
+    def markIndexCreationComplete(self, distribution, suite):
         """Note that archive indexes for `suite` have been created.
 
         This tells `listSuitesNeedingIndexes` that no, this suite no
         longer needs archive indexes to be set up.
         """
-        with file(self.locateIndexesMarker(suite), "w") as marker:
+        marker_name = self.locateIndexesMarker(distribution, suite)
+        with file(marker_name, "w") as marker:
             marker.write(
                 "Indexes for %s were created on %s.\n"
                 % (suite, datetime.now(utc)))
 
-    def createIndexes(self, suite):
+    def createIndexes(self, distribution, suite):
         """Create archive indexes for `distroseries`."""
         self.logger.info("Creating archive indexes for %s.", suite)
-        self.runPublishDistro(args=['-A'], suites=[suite])
-        self.markIndexCreationComplete(suite)
+        self.runPublishDistro(distribution, args=['-A'], suites=[suite])
+        self.markIndexCreationComplete(distribution, suite)
 
-    def processAccepted(self):
+    def processAccepted(self, distribution):
         """Run the process-accepted script."""
         self.logger.debug(
             "Processing the accepted queue into the publishing records...")
         script = ProcessAccepted(
-            test_args=[self.distribution.name], logger=self.logger)
+            test_args=[distribution.name], logger=self.logger)
         script.txn = self.txn
         script.main()
 
-    def getDirtySuites(self):
+    def getDirtySuites(self, distribution):
         """Return list of suites that have packages pending publication."""
         self.logger.debug("Querying which suites are pending publication...")
         query_distro = LpQueryDistro(
-            test_args=['-d', self.distribution.name, "pending_suites"],
+            test_args=['-d', distribution.name, "pending_suites"],
             logger=self.logger)
         receiver = StoreArgument()
         query_distro.runAction(presenter=receiver)
         return receiver.argument.split()
 
-    def getDirtySecuritySuites(self):
+    def getDirtySecuritySuites(self, distribution):
         """List security suites with pending publications."""
-        suites = self.getDirtySuites()
+        suites = self.getDirtySuites(distribution)
         return [suite for suite in suites if suite.endswith('-security')]
 
-    def rsyncBackupDists(self):
+    def rsyncBackupDists(self, distribution):
         """Populate the backup dists with a copy of distsroot.
 
         Uses "rsync -aH --delete" so that any obsolete files that may
@@ -365,12 +366,12 @@
                     "Creating backup dists directory %s", distscopy)
                 os.makedirs(distscopy)
 
-    def runPublishDistro(self, args=[], suites=None):
+    def runPublishDistro(self, distribution, args=[], suites=None):
         """Execute `publish-distro`."""
         if suites is None:
             suites = []
         arguments = (
-            ['-d', self.distribution.name] +
+            ['-d', distribution.name] +
             args +
             sum([['-s', suite] for suite in suites], []))
 
@@ -380,7 +381,8 @@
         publish_distro.txn = self.txn
         publish_distro.main()
 
-    def publishDistroArchive(self, archive, security_suites=None):
+    def publishDistroArchive(self, distribution, archive,
+                             security_suites=None):
         """Publish the results for an archive.
 
         :param archive: Archive to publish.
@@ -390,7 +392,7 @@
         purpose = archive.purpose
         archive_config = self.configs[purpose]
         self.logger.debug(
-            "Publishing the %s %s...", self.distribution.name, purpose.title)
+            "Publishing the %s %s...", distribution.name, purpose.title)
 
         # For reasons unknown, publishdistro only seems to work with a
         # directory that's inside the archive root.  So we move it there
@@ -403,13 +405,14 @@
 
         os.rename(get_backup_dists(archive_config), temporary_dists)
         try:
-            self.runPublishDistro(args=arguments, suites=security_suites)
+            self.runPublishDistro(
+                distribution, args=arguments, suites=security_suites)
         finally:
             os.rename(temporary_dists, get_backup_dists(archive_config))
 
-        self.runPublishDistroParts(archive)
+        self.runPublishDistroParts(distribution, archive)
 
-    def runPublishDistroParts(self, archive):
+    def runPublishDistroParts(self, distribution, archive):
         """Execute the publish-distro hooks."""
         archive_config = self.configs[archive.purpose]
         env = {
@@ -418,9 +421,9 @@
             }
         if archive_config.overrideroot is not None:
             env["OVERRIDEROOT"] = shell_quote(archive_config.overrideroot)
-        self.runParts('publish-distro.d', env)
+        self.runParts(distribution, 'publish-distro.d', env)
 
-    def installDists(self):
+    def installDists(self, distribution):
         """Put the new dists into place, as near-atomically as possible.
 
         For each archive, this switches the dists directory and the
@@ -440,7 +443,7 @@
             os.rename(backup_dists, dists)
             os.rename(temp_dists, backup_dists)
 
-    def generateListings(self):
+    def generateListings(self, distribution):
         """Create ls-lR.gz listings."""
         self.logger.debug("Creating ls-lR.gz...")
         lslr = "ls-lR.gz"
@@ -457,14 +460,14 @@
                     "Failed to create %s for %s." % (lslr, purpose.title)))
             os.rename(new_lslr_file, lslr_file)
 
-    def clearEmptyDirs(self):
+    def clearEmptyDirs(self, distribution):
         """Clear out any redundant empty directories."""
         for archive_config in self.configs.itervalues():
             self.executeShell(
                 "find '%s' -type d -empty | xargs -r rmdir"
                 % archive_config.archiveroot)
 
-    def runParts(self, parts, env):
+    def runParts(self, distribution, parts, env):
         """Execute run-parts.
 
         :param parts: The run-parts directory to execute:
@@ -472,7 +475,7 @@
         :param env: A dict of environment variables to pass to the
             scripts in the run-parts directory.
         """
-        parts_dir = find_run_parts_dir(self.distribution, parts)
+        parts_dir = find_run_parts_dir(distribution, parts)
         if parts_dir is None:
             self.logger.debug("Skipping run-parts %s: not configured.", parts)
             return
@@ -482,7 +485,7 @@
             failure=LaunchpadScriptFailure(
                 "Failure while executing run-parts %s." % parts_dir))
 
-    def runFinalizeParts(self, security_only=False):
+    def runFinalizeParts(self, distribution, security_only=False):
         """Run the finalize.d parts to finalize publication."""
         archive_roots = shell_quote(' '.join([
             archive_config.archiveroot
@@ -492,28 +495,29 @@
             'SECURITY_UPLOAD_ONLY': compose_shell_boolean(security_only),
             'ARCHIVEROOTS': archive_roots,
         }
-        self.runParts('finalize.d', env)
+        self.runParts(distribution, 'finalize.d', env)
 
-    def publishSecurityUploads(self):
+    def publishSecurityUploads(self, distribution):
         """Quickly process just the pending security uploads."""
         self.logger.debug("Expediting security uploads.")
-        security_suites = self.getDirtySecuritySuites()
+        security_suites = self.getDirtySecuritySuites(distribution)
         if len(security_suites) == 0:
             self.logger.debug("Nothing to do for security publisher.")
             return
 
         self.publishDistroArchive(
-            self.distribution.main_archive, security_suites=security_suites)
+            distribution, distribution.main_archive,
+            security_suites=security_suites)
 
-    def publishAllUploads(self):
+    def publishDistroUploads(self, distribution):
         """Publish the distro's complete uploads."""
         self.logger.debug("Full publication.  This may take some time.")
         for archive in self.archives:
             # This, for the main archive, is where the script spends
             # most of its time.
-            self.publishDistroArchive(archive)
+            self.publishDistroArchive(distribution, archive)
 
-    def publish(self, security_only=False):
+    def publish(self, distribution, security_only=False):
         """Do the main publishing work.
 
         :param security_only: If True, limit publication to security
@@ -523,13 +527,13 @@
         """
         try:
             if security_only:
-                self.publishSecurityUploads()
+                self.publishSecurityUploads(distribution)
             else:
-                self.publishAllUploads()
+                self.publishDistroUploads(distribution)
 
             # Swizzle the now-updated backup dists and the current dists
             # around.
-            self.installDists()
+            self.installDists(distribution)
         except:
             # If we failed here, there's a chance that we left a
             # working dists directory in its temporary location.  If so,
@@ -542,7 +546,7 @@
     def setUp(self):
         """Process options, and set up internal state."""
         self.processOptions()
-        self.archives = self.getArchives()
+        self.archives = self.getArchives(self.distribution)
         self.configs = self.getConfigs()
 
     def main(self):
@@ -550,32 +554,34 @@
         self.setUp()
         self.recoverWorkingDists()
 
-        for series in self.distribution.series:
+        distribution = self.distribution
+
+        for series in distribution.series:
             suites_needing_indexes = self.listSuitesNeedingIndexes(series)
             if len(suites_needing_indexes) > 0:
                 for suite in suites_needing_indexes:
-                    self.createIndexes(suite)
+                    self.createIndexes(distribution, suite)
                 # Don't try to do too much in one run.  Leave the rest
                 # of the work for next time.
                 return
 
-        self.processAccepted()
+        self.processAccepted(distribution)
         self.setUpDirs()
 
-        self.rsyncBackupDists()
-        self.publish(security_only=True)
-        self.runFinalizeParts(security_only=True)
+        self.rsyncBackupDists(distribution)
+        self.publish(distribution, security_only=True)
+        self.runFinalizeParts(distribution, security_only=True)
 
         if not self.options.security_only:
-            self.rsyncBackupDists()
-            self.publish(security_only=False)
-            self.generateListings()
-            self.clearEmptyDirs()
-            self.runFinalizeParts(security_only=False)
+            self.rsyncBackupDists(distribution)
+            self.publish(distribution, security_only=False)
+            self.generateListings(distribution)
+            self.clearEmptyDirs(distribution)
+            self.runFinalizeParts(distribution, security_only=False)
 
         if self.options.post_rsync:
             #  Update the backup dists with the published changes.  The
             #  initial rsync on the next run will not need to make any
             #  changes, and so it'll take the next run a little less
             #  time to publish its security updates.
-            self.rsyncBackupDists()
+            self.rsyncBackupDists(distribution)

=== modified file 'lib/lp/archivepublisher/tests/test_publish_ftpmaster.py'
--- lib/lp/archivepublisher/tests/test_publish_ftpmaster.py	2011-08-03 06:24:53 +0000
+++ lib/lp/archivepublisher/tests/test_publish_ftpmaster.py	2011-08-15 10:13:28 +0000
@@ -116,10 +116,11 @@
 
 
 def get_marker_files(script, distroseries):
-        suites = [
-            distroseries.getSuite(pocket)
-            for pocket in pocketsuffix.iterkeys()]
-        return [script.locateIndexesMarker(suite) for suite in suites]
+    """Return filesystem paths for all indexes markers for `distroseries`."""
+    suites = [
+        distroseries.getSuite(pocket) for pocket in pocketsuffix.iterkeys()]
+    distro = distroseries.distribution
+    return [script.locateIndexesMarker(distro, suite) for suite in suites]
 
 
 class HelpersMixin:
@@ -366,9 +367,11 @@
 
     def test_getDirtySuites_returns_suite_with_pending_publication(self):
         spph = self.factory.makeSourcePackagePublishingHistory()
+        distro = spph.distroseries.distribution
         script = self.makeScript(spph.distroseries.distribution)
         script.setUp()
-        self.assertEqual([name_spph_suite(spph)], script.getDirtySuites())
+        self.assertEqual(
+            [name_spph_suite(spph)], script.getDirtySuites(distro))
 
     def test_getDirtySuites_returns_suites_with_pending_publications(self):
         distro = self.makeDistroWithPublishDirectory()
@@ -382,14 +385,15 @@
         script.setUp()
         self.assertContentEqual(
             [name_spph_suite(spph) for spph in spphs],
-            script.getDirtySuites())
+            script.getDirtySuites(distro))
 
     def test_getDirtySuites_ignores_suites_without_pending_publications(self):
         spph = self.factory.makeSourcePackagePublishingHistory(
             status=PackagePublishingStatus.PUBLISHED)
+        distro = spph.distroseries.distribution
         script = self.makeScript(spph.distroseries.distribution)
         script.setUp()
-        self.assertEqual([], script.getDirtySuites())
+        self.assertEqual([], script.getDirtySuites(distro))
 
     def test_getDirtySecuritySuites_returns_security_suites(self):
         distro = self.makeDistroWithPublishDirectory()
@@ -404,7 +408,7 @@
         script.setUp()
         self.assertContentEqual(
             [name_spph_suite(spph) for spph in spphs],
-            script.getDirtySecuritySuites())
+            script.getDirtySecuritySuites(distro))
 
     def test_getDirtySecuritySuites_ignores_non_security_suites(self):
         distroseries = self.factory.makeDistroSeries()
@@ -419,7 +423,8 @@
                 distroseries=distroseries, pocket=pocket)
         script = self.makeScript(distroseries.distribution)
         script.setUp()
-        self.assertEqual([], script.getDirtySecuritySuites())
+        self.assertEqual(
+            [], script.getDirtySecuritySuites(distroseries.distribution))
 
     def test_rsync_copies_files(self):
         distro = self.makeDistroWithPublishDirectory()
@@ -431,7 +436,7 @@
         os.makedirs(dists_backup)
         os.makedirs(dists_root)
         write_marker_file([dists_root, "new-file"], "New file")
-        script.rsyncBackupDists()
+        script.rsyncBackupDists(distro)
         self.assertEqual(
             "New file", read_marker_file([dists_backup, "new-file"]))
 
@@ -445,7 +450,7 @@
         old_file = [dists_backup, "old-file"]
         write_marker_file(old_file, "old-file")
         os.makedirs(get_dists_root(get_pub_config(distro)))
-        script.rsyncBackupDists()
+        script.rsyncBackupDists(distro)
         self.assertFalse(path_exists(*old_file))
 
     def test_setUpDirs_creates_directory_structure(self):
@@ -478,10 +483,11 @@
         script.setUp()
         script.setUpDirs()
         script.runParts = FakeMethod()
-        script.publishDistroArchive(distro.main_archive)
+        script.publishDistroArchive(distro, distro.main_archive)
         self.assertEqual(1, script.runParts.call_count)
         args, kwargs = script.runParts.calls[0]
-        parts_dir, env = args
+        run_distro, parts_dir, env = args
+        self.assertEqual(distro, run_distro)
         self.assertEqual("publish-distro.d", parts_dir)
 
     def test_runPublishDistroParts_passes_parameters(self):
@@ -490,9 +496,9 @@
         script.setUp()
         script.setUpDirs()
         script.runParts = FakeMethod()
-        script.runPublishDistroParts(distro.main_archive)
+        script.runPublishDistroParts(distro, distro.main_archive)
         args, kwargs = script.runParts.calls[0]
-        parts_dir, env = args
+        run_distro, parts_dir, env = args
         required_parameters = set([
             "ARCHIVEROOT", "DISTSROOT", "OVERRIDEROOT"])
         missing_parameters = required_parameters.difference(set(env.keys()))
@@ -503,7 +509,7 @@
         script = self.makeScript(distro)
         script.setUp()
         script.setUpDirs()
-        script.generateListings()
+        script.generateListings(distro)
         pass
 
     def test_clearEmptyDirs_cleans_up_empty_directories(self):
@@ -514,7 +520,7 @@
         empty_dir = os.path.join(
             get_dists_root(get_pub_config(distro)), 'empty-dir')
         os.makedirs(empty_dir)
-        script.clearEmptyDirs()
+        script.clearEmptyDirs(distro)
         self.assertFalse(file_exists(empty_dir))
 
     def test_clearEmptyDirs_does_not_clean_up_nonempty_directories(self):
@@ -526,7 +532,7 @@
             get_dists_root(get_pub_config(distro)), 'nonempty-dir')
         os.makedirs(nonempty_dir)
         write_marker_file([nonempty_dir, "placeholder"], "Data here!")
-        script.clearEmptyDirs()
+        script.clearEmptyDirs(distro)
         self.assertTrue(file_exists(nonempty_dir))
 
     def test_processOptions_finds_distribution(self):
@@ -545,8 +551,9 @@
         self.enableRunParts()
         script = self.makeScript(self.prepareUbuntu())
         script.setUp()
+        distro = script.distribution
         script.executeShell = FakeMethod()
-        script.runParts("finalize.d", {})
+        script.runParts(distro, "finalize.d", {})
         self.assertEqual(1, script.executeShell.call_count)
         args, kwargs = script.executeShell.calls[-1]
         command_line, = args
@@ -559,10 +566,11 @@
         self.enableRunParts()
         script = self.makeScript(self.prepareUbuntu())
         script.setUp()
+        distro = script.distribution
         script.executeShell = FakeMethod()
         key = self.factory.getUniqueString()
         value = self.factory.getUniqueString()
-        script.runParts("finalize.d", {key: value})
+        script.runParts(distro, "finalize.d", {key: value})
         args, kwargs = script.executeShell.calls[-1]
         command_line, = args
         self.assertIn("%s=%s" % (key, value), command_line)
@@ -595,10 +603,11 @@
     def test_runFinalizeParts_passes_parameters(self):
         script = self.makeScript(self.prepareUbuntu())
         script.setUp()
+        distro = script.distribution
         script.runParts = FakeMethod()
-        script.runFinalizeParts()
+        script.runFinalizeParts(distro)
         args, kwargs = script.runParts.calls[0]
-        parts_dir, env = args
+        run_distro, parts_dir, env = args
         required_parameters = set(["ARCHIVEROOTS", "SECURITY_UPLOAD_ONLY"])
         missing_parameters = required_parameters.difference(set(env.keys()))
         self.assertEqual(set(), missing_parameters)
@@ -606,12 +615,13 @@
     def test_publishSecurityUploads_skips_pub_if_no_security_updates(self):
         script = self.makeScript()
         script.setUp()
+        distro = script.distribution
         script.setUpDirs()
         script.installDists = FakeMethod()
-        script.publishSecurityUploads()
+        script.publishSecurityUploads(distro)
         self.assertEqual(0, script.installDists.call_count)
 
-    def test_publishAllUploads_publishes_all_distro_archives(self):
+    def test_publishDistroUploads_publishes_all_distro_archives(self):
         distro = self.makeDistroWithPublishDirectory()
         distroseries = self.factory.makeDistroSeries(distribution=distro)
         partner_archive = self.factory.makeArchive(
@@ -624,9 +634,9 @@
         script.setUp()
         script.setUpDirs()
         script.publishDistroArchive = FakeMethod()
-        script.publishAllUploads()
+        script.publishDistroUploads(distro)
         published_archives = [
-            args[0] for args, kwargs in script.publishDistroArchive.calls]
+            args[1] for args, kwargs in script.publishDistroArchive.calls]
 
         self.assertContentEqual(
             distro.all_distro_archives, published_archives)
@@ -648,7 +658,7 @@
         script.logger = BufferLogger()
         script.logger.setLevel(logging.INFO)
         script.setUpDirs()
-        archive_config = script.configs[ArchivePurpose.PRIMARY]
+        archive_config = getPubConfig(distro.main_archive)
         backup_dists = os.path.join(
             archive_config.archiveroot + "-distscopy", "dists")
         working_dists = get_working_dists(archive_config)
@@ -677,16 +687,16 @@
         args, kwargs = script.publish.calls[0]
         self.assertEqual({'security_only': True}, kwargs)
 
-    def test_publishAllUploads_processes_all_archives(self):
+    def test_publishDistroUploads_processes_all_archives(self):
         distro = self.makeDistroWithPublishDirectory()
         partner_archive = self.factory.makeArchive(
             distribution=distro, purpose=ArchivePurpose.PARTNER)
         script = self.makeScript(distro)
         script.publishDistroArchive = FakeMethod()
         script.setUp()
-        script.publishAllUploads()
+        script.publishDistroUploads(distro)
         published_archives = [
-            args[0] for args, kwargs in script.publishDistroArchive.calls]
+            args[1] for args, kwargs in script.publishDistroArchive.calls]
         self.assertContentEqual(
             [distro.main_archive, partner_archive], published_archives)
 
@@ -726,7 +736,7 @@
             done
             """))
 
-        script.runFinalizeParts()
+        script.runFinalizeParts(distro)
 
         for archive in [distro.main_archive, distro.getArchive("partner")]:
             archive_root = getPubConfig(archive).archiveroot
@@ -745,20 +755,24 @@
 
         message = self.factory.getUniqueString()
         script = self.makeScript()
-        script.publishAllUploads = FakeMethod(failure=MoonPhaseError(message))
+        script.publishDistroUploads = FakeMethod(
+            failure=MoonPhaseError(message))
         script.setUp()
-        self.assertRaisesWithContent(MoonPhaseError, message, script.publish)
+        self.assertRaisesWithContent(
+            MoonPhaseError, message,
+            script.publish, script.distribution)
 
     def test_publish_obeys_keyboard_interrupt(self):
         # Similar to an Exception, a keyboard interrupt does not get
         # swallowed.
         message = self.factory.getUniqueString()
         script = self.makeScript()
-        script.publishAllUploads = FakeMethod(
+        script.publishDistroUploads = FakeMethod(
             failure=KeyboardInterrupt(message))
         script.setUp()
         self.assertRaisesWithContent(
-            KeyboardInterrupt, message, script.publish)
+            KeyboardInterrupt, message,
+            script.publish, script.distribution)
 
     def test_publish_recovers_working_dists_on_exception(self):
         # If an Exception comes up while publishing, the publish method
@@ -769,12 +783,12 @@
         failure = MoonPhaseError(self.factory.getUniqueString())
 
         script = self.makeScript()
-        script.publishAllUploads = FakeMethod(failure=failure)
+        script.publishDistroUploads = FakeMethod(failure=failure)
         script.recoverArchiveWorkingDir = FakeMethod()
         script.setUp()
 
         try:
-            script.publish()
+            script.publish(script.distribution)
         except MoonPhaseError:
             pass
 
@@ -786,12 +800,12 @@
         failure = KeyboardInterrupt("Ctrl-C!")
 
         script = self.makeScript()
-        script.publishAllUploads = FakeMethod(failure=failure)
+        script.publishDistroUploads = FakeMethod(failure=failure)
         script.recoverArchiveWorkingDir = FakeMethod()
         script.setUp()
 
         try:
-            script.publish()
+            script.publish(script.distribution)
         except KeyboardInterrupt:
             pass
 
@@ -804,7 +818,8 @@
 
     def createIndexesMarkerDir(self, script, distroseries):
         """Create the directory for `distroseries`'s indexes marker."""
-        marker = script.locateIndexesMarker(get_a_suite(distroseries))
+        marker = script.locateIndexesMarker(
+            distroseries.distribution, get_a_suite(distroseries))
         os.makedirs(os.path.dirname(marker))
 
     def makeDistroSeriesNeedingIndexes(self, distribution=None):
@@ -860,7 +875,7 @@
 
         needful_suites = script.listSuitesNeedingIndexes(series)
         suite = get_a_suite(series)
-        script.markIndexCreationComplete(suite)
+        script.markIndexCreationComplete(distro, suite)
         needful_suites.remove(suite)
         self.assertContentEqual(
             needful_suites, script.listSuitesNeedingIndexes(series))
@@ -885,9 +900,9 @@
         script.markIndexCreationComplete = FakeMethod()
         script.runPublishDistro = FakeMethod()
         suite = get_a_suite(series)
-        script.createIndexes(suite)
+        script.createIndexes(distro, suite)
         self.assertEqual(
-            [((suite, ), {})], script.markIndexCreationComplete.calls)
+            [((distro, suite), {})], script.markIndexCreationComplete.calls)
 
     def test_failed_index_creation_is_not_marked_complete(self):
         # If index creation fails, it is not marked as having been
@@ -900,7 +915,7 @@
         script.markIndexCreationComplete = FakeMethod()
         script.runPublishDistro = FakeMethod(failure=Boom("Sorry!"))
         try:
-            script.createIndexes(get_a_suite(series))
+            script.createIndexes(series.distribution, get_a_suite(series))
         except:
             pass
         self.assertEqual([], script.markIndexCreationComplete.calls)
@@ -911,9 +926,10 @@
         series = self.factory.makeDistroSeries()
         script = self.makeScript(series.distribution)
         script.setUp()
-        archive_root = script.configs[ArchivePurpose.PRIMARY].archiveroot
+        archive_root = getPubConfig(series.main_archive).archiveroot
         self.assertThat(
-            script.locateIndexesMarker(get_a_suite(series)),
+            script.locateIndexesMarker(
+                series.distribution, get_a_suite(series)),
             StartsWith(os.path.normpath(archive_root)))
 
     def test_locateIndexesMarker_uses_separate_files_per_suite(self):
@@ -946,7 +962,8 @@
         script.setUp()
         suite = get_a_suite(series)
         self.assertThat(
-            os.path.basename(script.locateIndexesMarker(suite)),
+            os.path.basename(script.locateIndexesMarker(
+                series.distribution, suite)),
             StartsWith("."))
 
     def test_script_calls_createIndexes_for_new_series(self):
@@ -958,7 +975,7 @@
         script.createIndexes = FakeMethod()
         script.main()
         expected_calls = [
-            ((series.getSuite(pocket), ), {})
+            ((distro, series.getSuite(pocket)), {})
             for pocket in pocketsuffix.iterkeys()]
         self.assertContentEqual(expected_calls, script.createIndexes.calls)
 
@@ -974,7 +991,7 @@
         self.createIndexesMarkerDir(script, series)
         suite = get_a_suite(series)
 
-        script.createIndexes(suite)
+        script.createIndexes(distro, suite)
 
         args, kwargs = script.runPublishDistro.calls[0]
         self.assertEqual([suite], kwargs['suites'])
@@ -994,6 +1011,6 @@
         script.main()
         self.assertEqual([], script.listSuitesNeedingIndexes(series))
         sources = os.path.join(
-            script.configs[ArchivePurpose.PRIMARY].distsroot,
+            getPubConfig(series.main_archive).distsroot,
             series.name, "main", "source", "Sources")
         self.assertTrue(file_exists(sources))

