← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel

 

Jeroen T. Vermeulen has proposed merging lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers): code
Related bugs:
  Bug #55798 in Launchpad itself: "cron.publish-ftpmaster untested"
  https://bugs.launchpad.net/launchpad/+bug/55798

For more details, see:
https://code.launchpad.net/~jtv/launchpad/db-bug-55798/+merge/55174

Replaces the shell script cron.publish-ftpmaster with a Python script, and generalizes away the distro so it's no longer Ubuntu-specific.

Ubuntu-specific or config-specific scripts are now run through the standard run-parts mechanism.

This is still a work in progress; expect a proper cover letter later when it goes up for review.
-- 
https://code.launchpad.net/~jtv/launchpad/db-bug-55798/+merge/55174
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel.
=== added file 'cronscripts/publish-ftpmaster.py'
--- cronscripts/publish-ftpmaster.py	1970-01-01 00:00:00 +0000
+++ cronscripts/publish-ftpmaster.py	2011-03-28 15:47:59 +0000
@@ -0,0 +1,17 @@
+#!/usr/bin/python -S
+#
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master FTP distro publishing script."""
+
+import _pythonpath
+
+from canonical.config import config
+from lp.soyuz.scripts.publish_ftpmaster import PublishFTPMaster
+
+
+if __name__ == '__main__':
+    script = PublishFTPMaster(
+        "publish-ftpmaster", dbuser=config.archivepublisher.dbuser)
+    script.lock_and_run()

=== modified file 'cronscripts/publishing/cron.publish-ftpmaster'
--- cronscripts/publishing/cron.publish-ftpmaster	2010-06-25 14:36:11 +0000
+++ cronscripts/publishing/cron.publish-ftpmaster	2011-03-28 15:47:59 +0000
@@ -9,6 +9,15 @@
     echo LPCONFIG must be set to run this script.
     exit 1
 fi
+
+# Injection points for testability.
+DISTRONAME="${DISTRONAME:-ubuntu}"
+ARCHIVEPARENT="${ARCHIVEPARENT:-/srv/launchpad.net/$DISTRONAME-archive}"
+LOCKFILE_CMD="${LOCKFILE_CMD:-lockfile}"
+DSYNC_FLIST_CMD="${DSYNC_FLIST_CMD:-dsync-flist}"
+COMMERCIAL_COMPAT_CMD="${COMMERCIAL_COMPAT_CMD:-commercial-compat.sh}"
+INHIBIT_GPG_SIGNING="${INHIBIT_GPG_SIGNING:-no}"
+
 SECURITY_PUBLISHER="no"
 if [ "$1" = "security" ]; then
     # We are running a security publisher run, which skips some steps.
@@ -22,28 +31,27 @@
 # Launchpad cron.daily (currently just for Ubuntu).
 
 # Informational -- this *MUST* match the database.
-ARCHIVEROOT=/srv/launchpad.net/ubuntu-archive/ubuntu
+ARCHIVEROOT=$ARCHIVEPARENT/$DISTRONAME
 DISTSROOT=$ARCHIVEROOT/dists
-OVERRIDEROOT=$ARCHIVEROOT/../ubuntu-overrides
-CACHEROOT=$ARCHIVEROOT/../ubuntu-cache
-DISTSCOPYROOT=$ARCHIVEROOT/../ubuntu-distscopy
+OVERRIDEROOT=$ARCHIVEROOT-overrides
+CACHEROOT=$ARCHIVEROOT-cache
+DISTSCOPYROOT=$ARCHIVEROOT-distscopy
 INDICES=$ARCHIVEROOT/indices
 PRODUCTION_CONFIG=ftpmaster-publish
 
 if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
-    ARCHIVEROOT_PARTNER=/srv/launchpad.net/ubuntu-archive/ubuntu-partner
-    GNUPGHOME=/srv/launchpad.net/ubuntu-archive/gnupg-home
+    ARCHIVEROOT_PARTNER=$ARCHIVEROOT-partner
+    GNUPGHOME=$ARCHIVEROOT/gnupg-home
 else
     # GNUPGHOME does not need to be set, keys can come from ~/.gnupg.
-    ARCHIVEROOT_PARTNER=/srv/launchpad.net/ppa/ubuntu-partner
+    ARCHIVEROOT_PARTNER=$ARCHIVEPARENT/ppa/$DISTRONAME-partner
 fi
 DISTSROOT_PARTNER=$ARCHIVEROOT_PARTNER/dists
-DISTSCOPYROOT_PARTNER=$ARCHIVEROOT_PARTNER/../ubuntu-partner-distscopy
+DISTSCOPYROOT_PARTNER=$ARCHIVEROOT-partner-distscopy
 
 # Configuration options.
-LAUNCHPADROOT=/srv/launchpad.net/codelines/current
-LOCKFILE=/srv/launchpad.net/ubuntu-archive/cron.daily.lock
-DISTRONAME=ubuntu
+LAUNCHPADROOT=$ARCHIVEPARENT/codelines/current
+LOCKFILE=$ARCHIVEPARENT/cron.daily.lock
 TRACEFILE=$ARCHIVEROOT/project/trace/$(hostname --fqdn)
 DSYNCLIST=$CACHEROOT/dsync.list
 MD5LIST=$INDICES/md5sums.gz
@@ -56,7 +64,7 @@
 PATH=$PATH:$LAUNCHPADROOT/scripts:$LAUNCHPADROOT/cronscripts:$LAUNCHPADROOT/cronscripts/publishing:$LAUNCHPADROOT/scripts/ftpmaster-tools
 
 # Claim the lock.
-if ! lockfile -r1 $LOCKFILE; then
+if ! ${LOCKFILE_CMD} -r1 $LOCKFILE; then
   echo "Could not claim lock file."
   exit 1
 fi
@@ -153,30 +161,34 @@
 publish-distro.py -v -v -d $DISTRONAME $SUITEOPTS -R ${DISTSROOT}.new
 
 set +x
-# Find all the Release files for which the Release.GPG is missing/too-old
-# We use -maxdepth 2 to only sign Release files for distroreleases,
-# not distroarchreleases/distrosourcereleases.
-# Also we sign the dist-upgrader tarballs because they're handy too.
-for CANDIDATE in $(find ${DISTSROOT}.new -maxdepth 2 -name Release) \
-       $(find ${DISTSROOT}.new/*/*/dist-upgrader* -name "*.tar.gz"); do
-  #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ]
-  if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
-    echo "$(date -R): (re-)signing $CANDIDATE"
-    gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
-  else
-    echo "$(date -R): Not re-signing $CANDIDATE"
-  fi
-done
-SIGNLIST_PARTNER=$(find ${DISTSROOT_PARTNER}.new -maxdepth 2 -name Release)
-for CANDIDATE in $SIGNLIST_PARTNER; do
-  #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ].
-  if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
-    echo "$(date -R): (re-)signing $CANDIDATE"
-    gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
-  else
-    echo "$(date -R): Not re-signing $CANDIDATE"
-  fi
-done
+if [ "${INHIBIT_GPG_SIGNING}" != yes ]; then
+  # Find all the Release files for which the Release.GPG is missing/too-old
+  # We use -maxdepth 2 to only sign Release files for distroreleases,
+  # not distroarchreleases/distrosourcereleases.
+  # Also we sign the dist-upgrader tarballs because they're handy too.
+  RELEASE_FILES=`find ${DISTSROOT}.new -maxdepth 2 -name Release`
+  DIST_UPGRADER_TARBALLS=`find ${DISTSROOT}.new/*/*/dist-upgrader* -name "*.tar.gz" || /bin/true`
+  for CANDIDATE in $RELEASE_FILES $DIST_UPGRADER_TARBALLS
+  do
+    #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ]
+    if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
+      echo "$(date -R): (re-)signing $CANDIDATE"
+      gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
+    else
+      echo "$(date -R): Not re-signing $CANDIDATE"
+    fi
+  done
+  SIGNLIST_PARTNER=$(find ${DISTSROOT_PARTNER}.new -maxdepth 2 -name Release)
+  for CANDIDATE in $SIGNLIST_PARTNER; do
+    #  [ Release.gpg missing   ] or [ Release is newer than Release.gpg ].
+    if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
+      echo "$(date -R): (re-)signing $CANDIDATE"
+      gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
+    else
+      echo "$(date -R): Not re-signing $CANDIDATE"
+    fi
+  done
+fi
 
 # The Packages and Sources files are very large and would cripple our
 # mirrors, so we remove them now that the uncompressed MD5SUMS are in the
@@ -207,7 +219,7 @@
 # dapper, edgy and feisty releases.  Don't fail the whole script if it
 # fails.
 echo "$(date -R): Generating -commerical pocket..."
-commercial-compat.sh || true
+${COMMERCIAL_COMPAT_CMD} || true
 
 # Timestamp our trace file to track when the last archive publisher run took
 # place.
@@ -234,9 +246,9 @@
     # Run dsync over primary archive only.
     echo "$(date -R): Running dsync over primary archive..."
     ( cd $ARCHIVEROOT ; \
-      dsync-flist -q generate $DSYNCLIST -e 'Packages*' -e 'Sources*' -e 'Release*' --md5 ; \
-      (dsync-flist -q md5sums $DSYNCLIST; find dists '(' -name 'Packages*' -o -name 'Sources*' -o -name 'Release*' ')' -print | xargs -r md5sum) | gzip -9n > ${MD5LIST} ; \
-      dsync-flist -q link-dups $DSYNCLIST || true )
+      $DSYNC_FLIST_CMD -q generate $DSYNCLIST -e 'Packages*' -e 'Sources*' -e 'Release*' --md5 ; \
+      ($DSYNC_FLIST_CMD -q md5sums $DSYNCLIST; find dists '(' -name 'Packages*' -o -name 'Sources*' -o -name 'Release*' ')' -print | xargs -r md5sum) | gzip -9n > ${MD5LIST} ; \
+      $DSYNC_FLIST_CMD -q link-dups $DSYNCLIST || true )
 
     # Clear out empty and thus redundant dirs.
     echo "$(date -R): Clearing out empty directories..."

=== added directory 'cronscripts/publishing/distro-parts'
=== added directory 'cronscripts/publishing/distro-parts/production'
=== added directory 'cronscripts/publishing/distro-parts/production/ubuntu'
=== added directory 'cronscripts/publishing/distro-parts/production/ubuntu/finalize.d'
=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/10-germinate'
--- cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/10-germinate	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/10-germinate	2011-03-28 15:47:59 +0000
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+if [ "$SECURITY_UPLOAD_ONLY" != "yes" ]
+then
+    cron.germinate || /bin/true
+fi

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/40-timestamp-trace-file'
--- cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/40-timestamp-trace-file	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/40-timestamp-trace-file	2011-03-28 15:47:59 +0000
@@ -0,0 +1,5 @@
+#! /bin/sh
+#
+# Timestamp the trace file, so we can track when the last archive publisher
+# run took place.
+date -u > "$ARCHIVEROOT"/project/trace/$(hostname --fqdn)

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/90-trigger-mirrors'
--- cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/90-trigger-mirrors	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/90-trigger-mirrors	2011-03-28 15:47:59 +0000
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Prod the master mirrors to propagate the update.
+
+MASTERMIRRORS="syowa frei scandium"
+echo "$(date -R): Triggering master mirrors..."
+
+for HOST in $MASTERMIRRORS
+do
+    echo "$(date -R): Triggering $HOST:"
+    ssh archvsync@$HOST
+done
+
+echo "$(date -R): Master mirror triggers completed."

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/README.txt'
--- cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/README.txt	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/finalize.d/README.txt	2011-03-28 15:47:59 +0000
@@ -0,0 +1,13 @@
+Put scripts here that should be run at completion of the publish-ftpmaster
+script.  They will be executed through the run-parts command, in alphabetical
+order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens.  All other files,
+including this text file, are ignored.
+
+Publication happens in two passes: the first, expedited pass processes only
+security updates.  The second pass processes all packages.  The scripts in
+this directory will be run once for each pass, with the variable
+SECURITY_UPLOAD_ONLY set to "yes" in the first pass and to "no" in the second
+pass.

=== added directory 'cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d'
=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/10-sign-releases'
--- cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/10-sign-releases	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/10-sign-releases	2011-03-28 15:47:59 +0000
@@ -0,0 +1,16 @@
+#!/bin/sh -e
+
+RELEASE_FILES=`find "$DISTSROOT".new -maxdepth 2 -name Release`
+DIST_UPGRADER_TARBALLS=`
+	find "$DISTSROOT".new/*/*/dist-upgrader* -name "*.tar.gz" || true`
+
+for CANDIDATE in $RELEASE_FILES $DIST_UPGRADER_TARBALLS
+do
+    if [ ! -f "$CANDIDATE.gpg" ] || [ "$CANDIDATE" -nt "$CANDIDATE.gpg" ]
+    then
+        echo "$(date -R): (re-)signing $CANDIDATE"
+        gpg --yes --detach-sign --armor -o "$CANDIDATE.gpg" --sign "$CANDIDATE"
+    else
+        echo "$(date -R): Not re-signing $CANDIDATE"
+    fi
+done

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/20-remove-uncompressed-listings'
--- cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/20-remove-uncompressed-listings	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/20-remove-uncompressed-listings	2011-03-28 15:47:59 +0000
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+# Remove uncompressed Packages and Sources files.
+#
+# The uncompressed versions of these files are very large and could cause
+# problems for our mirrors, so get rid of them.
+#
+# It's safe to do this since the uncompressed MD5 hashes have already been
+# computed for inclusion in the Release files.
+
+find "$DISTSROOT".new \( -name Packages -o -name Sources \) -exec rm -f -- "{}" \;

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/30-copy-indices'
--- cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/30-copy-indices	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/30-copy-indices	2011-03-28 15:47:59 +0000
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+
+echo "$(date -R): Copying the indices into place."
+
+INDICES="$ARCHIVEROOT/indices"
+
+rm -f -- "$INDICES/override"
+cp -- "$OVERRIDEROOT"/override.* "$INDICES/"

=== added file 'cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/README.txt'
--- cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/README.txt	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/production/ubuntu/publish-distro.d/README.txt	2011-03-28 15:47:59 +0000
@@ -0,0 +1,19 @@
+Put scripts here that should be run after publish-ftpmaster executes
+publish-distro.  They will be executed through the run-parts command, in
+alphabetical order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens.  All other files,
+including this text file, are ignored.
+
+Scripts in this directory will be run separately for each distro archive,
+possibly twice because publication happens in two passes: an expedited one for
+just the security uploads and then a second, slower pass for all packages.
+
+Some variables will be set before each script is run:
+
+ARCHIVEROOT - the archive's root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/ )
+
+DISTSROOT - the archive's dists root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/dists )

=== modified file 'lib/lp/services/utils.py'
--- lib/lp/services/utils.py	2011-02-20 13:26:48 +0000
+++ lib/lp/services/utils.py	2011-03-28 15:47:59 +0000
@@ -15,6 +15,7 @@
     'compress_hash',
     'decorate_with',
     'docstring_dedent',
+    'file_exists',
     'iter_split',
     'run_capturing_output',
     'synchronize',
@@ -24,6 +25,7 @@
     ]
 
 from itertools import tee
+import os
 from StringIO import StringIO
 import string
 import sys
@@ -47,6 +49,7 @@
     """
 
     class AutoDecorateMetaClass(type):
+
         def __new__(cls, class_name, bases, class_dict):
             new_class_dict = {}
             for name, value in class_dict.items():
@@ -207,11 +210,15 @@
 
 def decorate_with(context_factory, *args, **kwargs):
     """Create a decorator that runs decorated functions with 'context'."""
+
     def decorator(function):
+
         def decorated(*a, **kw):
             with context_factory(*args, **kwargs):
                 return function(*a, **kw)
+
         return mergeFunctionMetadata(function, decorated)
+
     return decorator
 
 
@@ -226,6 +233,11 @@
     return (first + '\n' + dedent(rest)).strip()
 
 
+def file_exists(filename):
+    """Does `filename` exist?"""
+    return os.access(filename, os.F_OK)
+
+
 class CapturedOutput(Fixture):
     """A fixture that captures output to stdout and stderr."""
 

=== modified file 'lib/lp/soyuz/scripts/processaccepted.py'
--- lib/lp/soyuz/scripts/processaccepted.py	2010-09-23 02:12:27 +0000
+++ lib/lp/soyuz/scripts/processaccepted.py	2011-03-28 15:47:59 +0000
@@ -30,7 +30,10 @@
 from lp.bugs.interfaces.bugtask import BugTaskStatus
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.scripts.base import LaunchpadScript
+from lp.services.scripts.base import (
+    LaunchpadScript,
+    LaunchpadScriptFailure,
+    )
 from lp.soyuz.enums import (
     ArchivePurpose,
     PackageUploadStatus,
@@ -241,6 +244,9 @@
         try:
             self.logger.debug("Finding distribution %s." % distro_name)
             distribution = getUtility(IDistributionSet).getByName(distro_name)
+            if distribution is None:
+                raise LaunchpadScriptFailure(
+                    "Distribution '%s' not found." % distro_name)
 
             # target_archives is a tuple of (archive, description).
             if self.options.ppa:

=== added file 'lib/lp/soyuz/scripts/publish_ftpmaster.py'
--- lib/lp/soyuz/scripts/publish_ftpmaster.py	1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/publish_ftpmaster.py	2011-03-28 15:47:59 +0000
@@ -0,0 +1,374 @@
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master distro publishing script."""
+
+__metaclass__ = type
+__all__ = [
+    'PublishFTPMaster',
+    ]
+
+from optparse import OptionParser
+import os
+import subprocess
+from zope.component import getUtility
+
+from canonical.config import config
+from lp.archivepublisher.config import getPubConfig
+from lp.registry.interfaces.distribution import IDistributionSet
+from lp.services.scripts.base import (
+    LaunchpadCronScript,
+    LaunchpadScriptFailure,
+    )
+from lp.services.utils import file_exists
+from lp.soyuz.enums import ArchivePurpose
+from lp.soyuz.scripts import publishdistro
+from lp.soyuz.scripts.ftpmaster import LpQueryDistro
+from lp.soyuz.scripts.processaccepted import ProcessAccepted
+
+
+ARCHIVES_TO_PUBLISH = [
+    ArchivePurpose.PRIMARY,
+    ArchivePurpose.PARTNER,
+    ]
+
+
+ARCHIVE_SUFFIXES = {
+    ArchivePurpose.PRIMARY: "",
+    ArchivePurpose.PARTNER: "-partner",
+}
+
+
+def run_command(args):
+    """Run command line (passed as a list).
+
+    :return: A tuple of process return value; stdout; and stderr.
+    """
+    child = subprocess.Popen(
+        args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE)
+    stdout, stderr = child.communicate()
+    result = child.wait()
+    return (result, stdout, stderr)
+
+
+def get_distscopyroot(archive_config):
+    """Return the distscopy root directory for `archive_config`."""
+    return archive_config.archiveroot + "-distscopy"
+
+
+class StoreArgument:
+    """Helper class: receive argument and store it."""
+
+    def __call__(self, argument):
+        self.argument = argument
+
+
+class PublishFTPMaster(LaunchpadCronScript):
+    """Publish a distro (update)."""
+
+    done_pub = False
+
+    def add_my_options(self):
+        self.parser.add_option(
+            '-d', '--distribution', dest='distribution', default=None,
+            help="Distribution to publish.")
+        self.parser.add_option(
+            '-s', '--security-only', dest='security_only',
+            action='store_true', default=False, help="Security upload only.")
+
+    def getArchives(self):
+        """Find archives for `self.distribution` that should be published."""
+        return [
+            archive
+            for archive in self.distribution.all_distro_archives
+                if archive.purpose in ARCHIVES_TO_PUBLISH]
+
+    def makeConfigs(self):
+        """Set up configuration objects for archives to be published.
+
+        The configs dict maps the archive purposes that are relevant for
+        publishing to the respective archives' configurations.
+        """
+        return dict(
+            (archive.purpose, getPubConfig(archive))
+            for archive in self.archives)
+
+    def cleanUp(self):
+        """Post-publishing cleanup."""
+        for purpose, archive_config in self.configs.iteritems():
+            self.logger.debug(
+                "Moving %s dists backup to safe keeping for next time.",
+                purpose.title)
+            distscopyroot = archive_config.archiveroot + '-distscopy'
+            dists = os.path.join(distscopyroot, "dists")
+            if self.done_pub:
+                replacement_dists = archive_config.distsroot + ".old"
+            else:
+                replacement_dists = archive_config.distsroot + ".new"
+            if file_exists(replacement_dists):
+                os.rename(replacement_dists, dists)
+
+    def processAccepted(self):
+        self.logger.debug(
+            "Processing the accepted queue into the publishing records...")
+        script = ProcessAccepted(test_args=[self.distribution.name])
+        script.txn = self.txn
+        script.logger = self.logger
+        script.main()
+
+    def getDirtySuites(self):
+        """Return list of suites that have packages pending publication."""
+        self.logger.debug("Querying which suites are pending publication...")
+        query_distro = LpQueryDistro(
+            test_args=['-d', self.distribution.name, "pending_suites"])
+        receiver = StoreArgument()
+        query_distro.runAction(presenter=receiver)
+        return receiver.argument.split()
+
+    def gatherSecuritySuites(self):
+        """List security suites."""
+        suites = self.getDirtySuites()
+        return [suite for suite in suites if suite.endswith('-security')]
+
+    def rsyncNewDists(self, archive_purpose):
+        """Populate dists.new with a copy of distsroot.
+
+        Uses "rsync -aH --delete" so that any obsolete files that may
+        still be in dists.new are cleaned up (bug 58835).
+
+        :param archive_purpose: The (purpose of the) archive to copy.
+        """
+        archive_config = self.configs[archive_purpose]
+        retval, stdout, stderr = run_command([
+            "rsync",
+            "-aH",
+            "--delete",
+            archive_config.distsroot + "/",
+            os.path.join(archive_config.archiveroot, "dists.new"),
+            ])
+
+        if retval != 0:
+            self.logger.warn(stdout)
+            self.logger.error(stderr)
+            raise LaunchpadScriptFailure(
+                "Failed to rsync dists.new for %s." % archive_purpose.title)
+
+    def setUpDirs(self):
+        """Copy the dists tree ready for publishing into.
+
+        We do this so that we don't get an inconsistent dists tree at
+        any point during the publishing cycle (which would cause buildds
+        to explode).
+
+        This is now done through maintaining a persistent backup copy of
+        the dists directory, which we move into place and bring up to
+        date with rsync.  Should achieve the same effect as copying, but
+        faster.
+        """
+        for archive_config in self.configs.itervalues():
+            archiveroot = archive_config.archiveroot
+            if not file_exists(archiveroot):
+                self.logger.debug("Creating archive root %s.", archiveroot)
+                os.makedirs(archiveroot)
+            distsroot = archive_config.distsroot
+            if not file_exists(distsroot):
+                self.logger.debug("Creating dists root %s.", distsroot)
+                os.makedirs(distsroot)
+
+        for purpose, archive_config in self.configs.iteritems():
+            dists = os.path.join(get_distscopyroot(archive_config), "dists")
+            dists_new = os.path.join(archive_config.archiveroot, "dists.new")
+            if not file_exists(dists):
+                os.makedirs(dists)
+            os.rename(dists, dists_new)
+            self.rsyncNewDists(purpose)
+
+    def publishDistroArchive(self, archive, security_suites=None):
+        """Publish the results for an archive.
+
+        :param archive: Archive to publish.
+        :param security_suites: An optional list of suites to restrict
+            the publishing to.
+        """
+        purpose = archive.purpose
+        self.logger.debug(
+            "Publishing the %s %s...", self.distribution.name, purpose.title)
+        archive_config = self.configs[purpose]
+        arguments = [
+            '-v', '-v',
+            '-d', self.distribution.name,
+            '-R', archive_config.distsroot + '.new',
+            ]
+
+        if archive.purpose == ArchivePurpose.PARTNER:
+            arguments.append('--partner')
+
+        if security_suites is not None:
+            arguments += sum([['-s', suite] for suite in security_suites], [])
+
+        parser = OptionParser()
+        publishdistro.add_options(parser)
+        options, args = parser.parse_args(arguments)
+        publishdistro.run_publisher(options, txn=self.txn, log=self.logger)
+
+        self.runPublishDistroParts(archive)
+
+    def runPublishDistroParts(self, archive):
+        """Execute the publish-distro hooks."""
+        archive_config = self.configs[archive.purpose]
+        env = {
+            'DISTSROOT': archive_config.distsroot,
+            'ARCHIVEROOT': archive_config.archiveroot,
+            }
+        self.runParts('publish-distro.d', env)
+
+    def installDists(self):
+        """Put the new dists into place, as near-atomically as possible."""
+        self.logger.debug("Placing the new dists into place...")
+
+        for archive_config in self.configs.itervalues():
+            distsroot = archive_config.distsroot
+            os.rename(distsroot, distsroot + ".old")
+            os.rename(distsroot + ".new", distsroot)
+
+        self.done_pub = True
+
+        for archive_config in self.configs.itervalues():
+            dists = os.path.join(get_distscopyroot(archive_config), "dists")
+            os.rename(archive_config.distsroot + ".old", dists)
+
+    def runCommercialCompat(self):
+        """Generate the -commercial pocket.
+
+        This is done for backwards compatibility with dapper, edgy, and
+        feisty releases.  Failure here is not fatal.
+        """
+        # XXX JeroenVermeulen 2011-03-24 bug=741683: Retire
+        # commercial-compat.sh (and this method) as soon as Dapper
+        # support ends.
+        if self.distribution.name != 'ubuntu':
+            return
+        if config.instance_name != 'production':
+            return
+
+        try:
+            os.system("""
+                env PATH="$PATH:%s/cronscripts/publishing" \
+                    LPCONFIG="%s" \
+                    commercial-compat.sh
+                """ % (config.root, config.instance_name))
+        except Exception:
+            pass
+
+    def generateListings(self):
+        """Create ls-lR.gz listings."""
+        self.logger.debug("Creating ls-lR.gz...")
+        lslr = "ls-lR.gz"
+        lslr_new = "." + lslr + ".new"
+        for purpose, archive_config in self.configs.iteritems():
+            lslr_file = os.path.join(archive_config.archiveroot, lslr)
+            new_lslr_file = os.path.join(archive_config.archiveroot, lslr_new)
+            if file_exists(new_lslr_file):
+                os.remove(new_lslr_file)
+            retval = os.system(
+                "cd -- '%s' ; TZ=UTC ls -lR | gzip -9n >'%s'"
+                % (archive_config.archiveroot, lslr_new))
+            if retval != 0:
+                raise LaunchpadScriptFailure(
+                    "Failed to create %s for %s." % (lslr, purpose.title))
+            os.rename(new_lslr_file, lslr_file)
+
+    def clearEmptyDirs(self):
+        """Clear out any redundant empty directories."""
+        for archive_config in self.configs.itervalues():
+            os.system(
+                "find '%s' -type d -empty | xargs -r rmdir"
+                % archive_config.archiveroot)
+
+    def processOptions(self):
+        """Handle command-line options.
+
+        Sets `self.distribution` to the `Distribution` to publish.
+        """
+        if self.options.distribution is None:
+            raise LaunchpadScriptFailure("Specify a distribution.")
+
+        self.distribution = getUtility(IDistributionSet).getByName(
+            self.options.distribution)
+        if self.distribution is None:
+            raise LaunchpadScriptFailure(
+                "Distribution %s not found." % self.options.distribution)
+
+    def runParts(self, parts, env):
+        """Execute run-parts.
+
+        :param parts: The run-parts directory to execute:
+            "publish-distro.d" or "finalize.d".
+        :param env: A dict of environment variables to pass to the
+            scripts in the run-parts directory.
+        """
+        parts_dir = os.path.join(
+            config.root, 'cronscripts', 'publishing', 'distro-parts',
+            config.instance_name, self.distribution.name, parts)
+        if not file_exists(parts_dir):
+            return
+        env_string = ' '.join(['='.join(pair) for pair in env.iteritems()])
+        retval = os.system("%s run-parts -- '%s'" % (env_string, parts_dir))
+        if retval != 0:
+            raise LaunchpadScriptFailure(
+                "Failure while executing run-parts %s." % parts_dir)
+
+    def runFinalizeParts(self, security_only=False):
+        """Run the finalize.d parts to finalize publication."""
+        boolean_text = {
+            True: "yes",
+            False: "no",
+        }
+        env = {'SECURITY_UPLOAD_ONLY': boolean_text[security_only]}
+        self.runParts('finalize.d', env)
+
+    def publishSecurityUploads(self):
+        security_suites = self.gatherSecuritySuites()
+        if len(security_suites) == 0:
+            self.logger.info("Nothing to do for security publisher.")
+            return
+        partner_archive = self.distribution.getArchive("partner")
+        if partner_archive is not None:
+            self.publishDistroArchive(partner_archive)
+        self.publishDistroArchive(
+            self.distribution.main_archive, security_suites=security_suites)
+        self.installDists()
+        self.runCommercialCompat()
+        self.runFinalizeParts(security_only=True)
+
+    def publishAllUploads(self):
+        for archive in self.archives:
+            # This, for the main archive, is where the script spends
+            # most of its time.
+            self.publishDistroArchive(archive)
+
+        self.installDists()
+        self.runCommercialCompat()
+        self.generateListings()
+        self.clearEmptyDirs()
+        self.runFinalizeParts()
+
+    def setUp(self):
+        """Process options, and set up internal state."""
+        self.processOptions()
+        self.archives = self.getArchives()
+        self.configs = self.makeConfigs()
+
+    def main(self):
+        """See `LaunchpadScript`."""
+        self.setUp()
+        try:
+            self.processAccepted()
+            # XXX: Repeat setUpDirs for security/full upload?
+            self.setUpDirs()
+            self.publishSecurityUploads()
+            if not self.options.security_only:
+                self.publishAllUploads()
+        finally:
+            self.cleanUp()

=== added file 'lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py'
--- lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py	1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py	2011-03-28 15:47:59 +0000
@@ -0,0 +1,419 @@
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test publish-ftpmaster cron script."""
+
+__metaclass__ = type
+
+import os
+import transaction
+from zope.component import getUtility
+
+from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
+from canonical.testing.layers import LaunchpadZopelessLayer
+from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
+from lp.registry.interfaces.pocket import (
+    PackagePublishingPocket,
+    pocketsuffix,
+    )
+from lp.services.log.logger import DevNullLogger
+from lp.services.utils import file_exists
+from lp.soyuz.enums import (
+    ArchivePurpose,
+    PackagePublishingStatus,
+    )
+from lp.soyuz.scripts.publish_ftpmaster import PublishFTPMaster
+from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
+from lp.testing import (
+    run_script,
+    TestCaseWithFactory,
+    )
+from lp.testing.fakemethod import FakeMethod
+
+
+def name_spph_suite(spph):
+    """Return name of `spph`'s suite."""
+    return spph.distroseries.name + pocketsuffix[spph.pocket]
+
+
+class TestPublishFTPMaster(TestCaseWithFactory):
+    layer = LaunchpadZopelessLayer
+
+    # Location of the publish-ftpmaster script under test.
+    SCRIPT_PATH = "cronscripts/publish-ftpmaster.py"
+
+    def setUpForScriptRun(self, distro):
+        """Prepare for a run of `PublishFTPMaster` for the named distro."""
+        config = getUtility(IPublisherConfigSet).getByDistribution(distro)
+        config.root_dir = unicode(self.makeTemporaryDirectory())
+        return config.root_dir
+
+    def makeScript(self, distro):
+        """Produce instance of the `PublishFTPMaster` script."""
+        script = PublishFTPMaster(test_args=["-d", distro.name])
+        script.txn = transaction
+        script.logger = DevNullLogger()
+        return script
+
+    def readReleaseFile(self, filename):
+        """Read a Release file, return as a keyword/value dict."""
+        lines = []
+        for line in file(filename):
+            if line.startswith(' '):
+                lines[-1] += line
+            else:
+                lines.append(line)
+        return dict(
+            (key, value.strip())
+            for key, value in [line.split(':', 1) for line in lines])
+
+    def writeMarkerFile(self, path, contents):
+        """Write a marker file for checking directory movements.
+
+        :param path: A list of path components.
+        :param contents: Text to write into the file.
+        """
+        marker = file(os.path.join(*path), "w")
+        marker.write(contents)
+        marker.flush()
+        marker.close()
+
+    def readMarkerFile(self, path):
+        """Read the contents of a marker file.
+
+        :return: Contents of the marker file.
+        """
+        return file(os.path.join(*path)).read()
+
+    def test_script_runs_successfully(self):
+        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+        self.setUpForScriptRun(ubuntu)
+        transaction.commit()
+        stdout, stderr, retval = run_script(
+            self.SCRIPT_PATH + " -d ubuntu")
+        self.assertEqual(0, retval, "Script failure:\n" + stderr)
+
+    def test_script_is_happy_with_no_publications(self):
+        distro = self.factory.makeDistribution()
+        self.setUpForScriptRun(distro)
+        self.makeScript(distro).main()
+
+    def test_produces_listings(self):
+        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+        rootdir = self.setUpForScriptRun(ubuntu)
+        self.makeScript(ubuntu).main()
+
+        listing = os.path.join(rootdir, 'ubuntu', 'ls-lR.gz')
+        self.assertTrue(file_exists(listing))
+
+    def test_publishes_package(self):
+        test_publisher = SoyuzTestPublisher()
+        distroseries = test_publisher.setUpDefaultDistroSeries()
+        distro = distroseries.distribution
+        self.factory.makeComponentSelection(
+            distroseries=distroseries, component="main")
+        self.factory.makeArchive(
+            distribution=distro, purpose=ArchivePurpose.PARTNER)
+        test_publisher.getPubSource()
+
+        rootdir = self.setUpForScriptRun(distro)
+        self.makeScript(distro).main()
+
+        dsc = os.path.join(
+            rootdir, distro.name, 'pool', 'main', 'f', 'foo', 'foo_666.dsc')
+        self.assertEqual("I do not care about sources.", file(dsc).read())
+        overrides = os.path.join(
+            rootdir, distro.name + '-overrides',
+            distroseries.name + '_main_source')
+        self.assertEqual(dsc, file(overrides).read().rstrip())
+        sources = os.path.join(
+            rootdir, distro.name, 'dists', distroseries.name, 'main',
+            'source', 'Sources.gz')
+        self.assertTrue(file_exists(sources))
+        sources = os.path.join(
+            rootdir, distro.name, 'dists', distroseries.name, 'main',
+            'source', 'Sources.bz2')
+        self.assertTrue(file_exists(sources))
+
+        distcopyseries = os.path.join(
+            rootdir, distro.name, 'dists', distroseries.name)
+        release = self.readReleaseFile(
+            os.path.join(distcopyseries, "Release"))
+        self.assertEqual(distro.displayname, release['Origin'])
+        self.assertEqual(distro.displayname, release['Label'])
+        self.assertEqual(distroseries.name, release['Suite'])
+        self.assertEqual(distroseries.name, release['Codename'])
+        self.assertEqual("main", release['Components'])
+        self.assertEqual("", release["Architectures"])
+        self.assertIn("Date", release)
+        self.assertIn("Description", release)
+        self.assertNotEqual("", release["MD5Sum"])
+        self.assertNotEqual("", release["SHA1"])
+        self.assertNotEqual("", release["SHA256"])
+
+        main_release = self.readReleaseFile(
+            os.path.join(distcopyseries, 'main', 'source', "Release"))
+        self.assertEqual(distroseries.name, main_release["Archive"])
+        self.assertEqual("main", main_release["Component"])
+        self.assertEqual(distro.displayname, main_release["Origin"])
+        self.assertEqual(distro.displayname, main_release["Label"])
+        self.assertEqual("source", main_release["Architecture"])
+
+    def test_cleanup_moves_dists_to_new_if_not_published(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+
+        archive_root = os.path.join(root_dir, distro.name)
+        new_distsroot = os.path.join(archive_root, "dists.new")
+        os.makedirs(new_distsroot)
+        self.writeMarkerFile([new_distsroot, "marker"], "dists.new")
+        distscopyroot = archive_root + "-distscopy"
+        os.makedirs(distscopyroot)
+
+        script = self.makeScript(distro)
+        script.setUp()
+        script.cleanUp()
+        self.assertEqual(
+            "dists.new",
+            self.readMarkerFile([distscopyroot, "dists", "marker"]))
+
+    def test_cleanup_moves_dists_to_old_if_published(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        archive_root = os.path.join(root_dir, distro.name)
+        old_distsroot = os.path.join(archive_root, "dists.old")
+        os.makedirs(old_distsroot)
+        self.writeMarkerFile([old_distsroot, "marker"], "dists.old")
+        distscopyroot = archive_root + "-distscopy"
+        os.makedirs(distscopyroot)
+
+        script = self.makeScript(distro)
+        script.setUp()
+        script.done_pub = True
+        script.cleanUp()
+        self.assertEqual(
+            "dists.old",
+            self.readMarkerFile([distscopyroot, "dists", "marker"]))
+
+    def test_getDirtySuites_returns_suite_with_pending_publication(self):
+        spph = self.factory.makeSourcePackagePublishingHistory()
+        script = self.makeScript(spph.distroseries.distribution)
+        script.setUp()
+        self.assertEqual([name_spph_suite(spph)], script.getDirtySuites())
+
+    def test_getDirtySuites_returns_suites_with_pending_publications(self):
+        distro = self.factory.makeDistribution()
+        spphs = [
+            self.factory.makeSourcePackagePublishingHistory(
+                distroseries=self.factory.makeDistroSeries(
+                    distribution=distro))
+            for counter in xrange(2)]
+
+        script = self.makeScript(distro)
+        script.setUp()
+        self.assertContentEqual(
+            [name_spph_suite(spph) for spph in spphs],
+            script.getDirtySuites())
+
+    def test_getDirtySuites_ignores_suites_without_pending_publications(self):
+        spph = self.factory.makeSourcePackagePublishingHistory(
+            status=PackagePublishingStatus.PUBLISHED)
+        script = self.makeScript(spph.distroseries.distribution)
+        script.setUp()
+        self.assertEqual([], script.getDirtySuites())
+
+    def test_gatherSecuritySuites_returns_security_suites(self):
+        distro = self.factory.makeDistribution()
+        spphs = [
+            self.factory.makeSourcePackagePublishingHistory(
+                distroseries=self.factory.makeDistroSeries(
+                    distribution=distro),
+                pocket=PackagePublishingPocket.SECURITY)
+            for counter in xrange(2)]
+
+        script = self.makeScript(distro)
+        script.setUp()
+        self.assertContentEqual(
+            [name_spph_suite(spph) for spph in spphs],
+            script.gatherSecuritySuites())
+
+    def test_gatherSecuritySuites_ignores_non_security_suites(self):
+        distroseries = self.factory.makeDistroSeries()
+        spphs = [
+            self.factory.makeSourcePackagePublishingHistory(
+                distroseries=distroseries, pocket=pocket)
+            for pocket in [
+                PackagePublishingPocket.RELEASE,
+                PackagePublishingPocket.UPDATES,
+                PackagePublishingPocket.PROPOSED,
+                PackagePublishingPocket.BACKPORTS,
+                ]]
+        script = self.makeScript(distroseries.distribution)
+        script.setUp()
+        self.assertEqual([], script.gatherSecuritySuites())
+
+    def test_rsync_copies_files(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        script = self.makeScript(distro)
+        script.setUp()
+        dists_root = os.path.join(root_dir, distro.name, "dists")
+        os.makedirs(dists_root)
+        os.makedirs(dists_root + ".new")
+        self.writeMarkerFile([dists_root, "new-file"], "New file")
+        script.rsyncNewDists(ArchivePurpose.PRIMARY)
+        self.assertEqual(
+            "New file",
+            self.readMarkerFile([dists_root + ".new", "new-file"]))
+
+    def test_rsync_cleans_up_obsolete_files(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        script = self.makeScript(distro)
+        script.setUp()
+        dists_root = os.path.join(root_dir, distro.name, "dists")
+        os.makedirs(dists_root)
+        os.makedirs(dists_root + ".new")
+        old_file = os.path.join(dists_root + ".new", "old-file")
+        self.writeMarkerFile([old_file], "old-file")
+        script.rsyncNewDists(ArchivePurpose.PRIMARY)
+        self.assertFalse(file_exists(old_file))
+
+    def test_setUpDirs_creates_directory_structure(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        archive_root = os.path.join(root_dir, distro.name)
+        script = self.makeScript(distro)
+        script.setUp()
+
+        self.assertFalse(file_exists(archive_root))
+
+        script.setUpDirs()
+
+        self.assertTrue(file_exists(archive_root))
+        self.assertTrue(file_exists(os.path.join(archive_root, "dists")))
+        self.assertTrue(file_exists(os.path.join(archive_root, "dists.new")))
+
+    def test_setUpDirs_does_not_mind_if_directories_already_exist(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        archive_root = os.path.join(root_dir, distro.name)
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.setUpDirs()
+        self.assertTrue(file_exists(archive_root))
+
+    def test_setUpDirs_moves_dists_to_dists_new(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        archive_root = os.path.join(root_dir, distro.name)
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        self.writeMarkerFile([archive_root, "dists", "marker"], "X")
+        script.setUpDirs()
+        self.assertEqual(
+            "X", self.readMarkerFile([archive_root, "dists.new", "marker"]))
+
+    def test_publishDistroArchive_runs_parts(self):
+        distro = self.factory.makeDistribution()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.runParts = FakeMethod()
+        script.publishDistroArchive(distro.main_archive)
+        self.assertEqual(1, script.runParts.call_count)
+        args, kwargs = script.runParts.calls[0]
+        parts_dir, env = args
+        self.assertEqual("publish-distro.d", parts_dir)
+
+    def test_runPublishDistroParts_passes_parameters(self):
+        distro = self.factory.makeDistribution()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.runParts = FakeMethod()
+        script.runPublishDistroParts(distro.main_archive)
+        args, kwargs = script.runParts.calls[0]
+        parts_dir, env = args
+        required_parameters = set(["DISTSROOT", "ARCHIVEROOT"])
+        missing_parameters = set(env.keys()).difference(required_parameters)
+        self.assertEqual(set(), missing_parameters)
+
+    def test_installDists_sets_done_pub(self):
+        distro = self.factory.makeDistribution()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        self.assertFalse(script.done_pub)
+        script.installDists()
+        self.assertTrue(script.done_pub)
+
+    def test_installDists_replaces_distsroot(self):
+        distro = self.factory.makeDistribution()
+        root_dir = self.setUpForScriptRun(distro)
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        archive_root = os.path.join(root_dir, distro.name)
+        distsroot = os.path.join(archive_root, "dists")
+
+        self.writeMarkerFile([distsroot, "marker"], "old")
+        self.writeMarkerFile([distsroot + ".new", "marker"], "new")
+
+        script.installDists()
+
+        self.assertEqual("new", self.readMarkerFile([distsroot, "marker"]))
+        self.assertEqual("old", self.readMarkerFile(
+            [archive_root + "-distscopy", "dists", "marker"]))
+
+    def test_installDists_replaces_distscopyroot(self):
+        pass
+
+    def test_runCommercialCompat_runs_commercial_compat_script(self):
+        pass
+
+    def test_runCommercialCompat_runs_only_for_ubuntu(self):
+        pass
+
+    def test_runCommercialCompat_runs_only_on_production_config(self):
+        pass
+
+    def test_generateListings_writes_ls_lR_gz(self):
+        pass
+
+    def test_clearEmptyDirs_cleans_up_empty_directories(self):
+        pass
+
+    def test_clearEmptyDirs_does_not_clean_up_nonempty_directories(self):
+        pass
+
+    def test_processOptions_finds_distribution(self):
+        pass
+
+    def test_processOptions_complains_about_unknown_distribution(self):
+        pass
+
+    def test_runParts_runs_parts(self):
+        pass
+
+    def test_runFinalizeParts_passes_parameters(self):
+        pass
+
+    def test_publishSecurityUploads_XXX_1(self):
+        pass
+
+    def test_publishSecurityUploads_XXX_2(self):
+        pass
+
+    def test_publishSecurityUploads_XXX_3(self):
+        pass
+
+    def test_publishAllUploads_publishes_all_distro_archives(self):
+        pass
+
+    def test_publishAllUploads_XXX_1(self):
+        pass
+
+    def test_publishAllUploads_XXX_2(self):
+        pass
+
+    def test_publishAllUploads_XXX_3(self):
+        pass

=== modified file 'lib/lp/testing/__init__.py'
--- lib/lp/testing/__init__.py	2011-03-24 22:25:41 +0000
+++ lib/lp/testing/__init__.py	2011-03-28 15:47:59 +0000
@@ -1061,17 +1061,19 @@
         now += delta
 
 
-def run_script(cmd_line):
+def run_script(cmd_line, env=None):
     """Run the given command line as a subprocess.
 
-    Return a 3-tuple containing stdout, stderr and the process' return code.
-
-    The environment given to the subprocess is the same as the one in the
-    parent process except for the PYTHONPATH, which is removed so that the
-    script, passed as the `cmd_line` parameter, will fail if it doesn't set it
-    up properly.
+    :param cmd_line: A command line suitable for passing to
+        `subprocess.Popen`.
+    :param env: An optional environment dict.  If none is given, the
+        script will get a copy of your present environment.  Either way,
+        PYTHONPATH will be removed from it because it will break the
+        script.
+    :return: A 3-tuple of stdout, stderr, and the process' return code.
     """
-    env = os.environ.copy()
+    if env is None:
+        env = os.environ.copy()
     env.pop('PYTHONPATH', None)
     process = subprocess.Popen(
         cmd_line, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE,

=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py	2011-03-25 03:26:29 +0000
+++ lib/lp/testing/factory.py	2011-03-28 15:47:59 +0000
@@ -253,11 +253,15 @@
     )
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
-from lp.soyuz.interfaces.component import IComponentSet
+from lp.soyuz.interfaces.component import (
+    IComponent,
+    IComponentSet,
+    )
 from lp.soyuz.interfaces.packageset import IPackagesetSet
 from lp.soyuz.interfaces.processor import IProcessorFamilySet
 from lp.soyuz.interfaces.publishing import IPublishingSet
 from lp.soyuz.interfaces.section import ISectionSet
+from lp.soyuz.model.component import ComponentSelection
 from lp.soyuz.model.files import (
     BinaryPackageFile,
     SourcePackageReleaseFile,
@@ -2398,6 +2402,23 @@
             name = self.getUniqueString()
         return getUtility(IComponentSet).ensure(name)
 
+    def makeComponentSelection(self, distroseries=None, component=None):
+        """Make a new `ComponentSelection`.
+
+        :param distroseries: Optional `DistroSeries`.  If none is given,
+            one will be created.
+        :param component: Optional `Component` or a component name.  If
+            none is given, one will be created.
+        """
+        if distroseries is None:
+            distroseries = self.makeDistroSeries()
+
+        if not IComponent.providedBy(component):
+            component = self.makeComponent(component)
+
+        return ComponentSelection(
+            distroseries=distroseries, component=component)
+
     def makeArchive(self, distribution=None, owner=None, name=None,
                     purpose=None, enabled=True, private=False,
                     virtualized=True, description=None, displayname=None):

=== modified file 'lib/lp/testing/tests/test_factory.py'
--- lib/lp/testing/tests/test_factory.py	2010-12-02 16:13:51 +0000
+++ lib/lp/testing/tests/test_factory.py	2011-03-28 15:47:59 +0000
@@ -52,6 +52,7 @@
     )
 from lp.soyuz.interfaces.queue import IPackageUpload
 from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease
+from lp.soyuz.model.component import ComponentSelection
 from lp.testing import TestCaseWithFactory
 from lp.testing.factory import is_security_proxied_or_harmless
 from lp.testing.matchers import (
@@ -399,6 +400,28 @@
         distroseries = self.factory.makeDistroSeries()
         self.assertThat(distroseries.displayname, StartsWith("Distroseries"))
 
+    # makeComponentSelection
+    def test_makeComponentSelection_makes_ComponentSelection(self):
+        selection = self.factory.makeComponentSelection()
+        self.assertIsInstance(selection, ComponentSelection)
+
+    def test_makeComponentSelection_uses_distroseries(self):
+        distroseries = self.factory.makeDistroSeries()
+        selection = self.factory.makeComponentSelection(
+            distroseries=distroseries)
+        self.assertEqual(distroseries, selection.distroseries)
+
+    def test_makeComponentSelection_uses_component(self):
+        component = self.factory.makeComponent()
+        selection = self.factory.makeComponentSelection(component=component)
+        self.assertEqual(component, selection.component)
+
+    def test_makeComponentSelection_finds_component(self):
+        component = self.factory.makeComponent()
+        selection = self.factory.makeComponentSelection(
+            component=component.name)
+        self.assertEqual(component, selection.component)
+
     # makeLanguage
     def test_makeLanguage(self):
         # Without parameters, makeLanguage creates a language with code


Follow ups