launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #03103
[Merge] lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel
Jeroen T. Vermeulen has proposed merging lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers): code
Related bugs:
Bug #55798 in Launchpad itself: "cron.publish-ftpmaster untested"
https://bugs.launchpad.net/launchpad/+bug/55798
For more details, see:
https://code.launchpad.net/~jtv/launchpad/db-bug-55798/+merge/55174
Replaces the shell script cron.publish-ftpmaster with a Python script, and generalizes away the distro so it's no longer Ubuntu-specific.
Ubuntu-specific or config-specific scripts are now run through the standard run-parts mechanism.
Since Julian knows all about this work and has agreed to review it, I can get away with not writing a long cover letter this time.
--
https://code.launchpad.net/~jtv/launchpad/db-bug-55798/+merge/55174
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jtv/launchpad/db-bug-55798 into lp:launchpad/db-devel.
=== modified file 'configs/development/launchpad-lazr.conf'
--- configs/development/launchpad-lazr.conf 2011-03-23 09:22:20 +0000
+++ configs/development/launchpad-lazr.conf 2011-03-29 13:43:18 +0000
@@ -5,6 +5,13 @@
[meta]
extends: ../../lib/canonical/config/schema-lazr.conf
+[archivepublisher]
+run_parts_location: none
+
+# XXX JeroenVermeulen 2011-03-29 bug=741683: Retire this item when
+# Dapper support ends.
+run_commercial_compat: false
+
[branchscanner]
oops_prefix: BS
error_dir: /var/tmp/codehosting.test
=== added file 'cronscripts/publish-ftpmaster.py'
--- cronscripts/publish-ftpmaster.py 1970-01-01 00:00:00 +0000
+++ cronscripts/publish-ftpmaster.py 2011-03-29 13:43:18 +0000
@@ -0,0 +1,17 @@
+#!/usr/bin/python -S
+#
+# Copyright 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master FTP distro publishing script."""
+
+import _pythonpath
+
+from canonical.config import config
+from lp.soyuz.scripts.publish_ftpmaster import PublishFTPMaster
+
+
+if __name__ == '__main__':
+ script = PublishFTPMaster(
+ "publish-ftpmaster", dbuser=config.archivepublisher.dbuser)
+ script.lock_and_run()
=== modified file 'cronscripts/publishing/cron.publish-ftpmaster'
--- cronscripts/publishing/cron.publish-ftpmaster 2010-06-25 14:36:11 +0000
+++ cronscripts/publishing/cron.publish-ftpmaster 2011-03-29 13:43:18 +0000
@@ -9,6 +9,15 @@
echo LPCONFIG must be set to run this script.
exit 1
fi
+
+# Injection points for testability.
+DISTRONAME="${DISTRONAME:-ubuntu}"
+ARCHIVEPARENT="${ARCHIVEPARENT:-/srv/launchpad.net/$DISTRONAME-archive}"
+LOCKFILE_CMD="${LOCKFILE_CMD:-lockfile}"
+DSYNC_FLIST_CMD="${DSYNC_FLIST_CMD:-dsync-flist}"
+COMMERCIAL_COMPAT_CMD="${COMMERCIAL_COMPAT_CMD:-commercial-compat.sh}"
+INHIBIT_GPG_SIGNING="${INHIBIT_GPG_SIGNING:-no}"
+
SECURITY_PUBLISHER="no"
if [ "$1" = "security" ]; then
# We are running a security publisher run, which skips some steps.
@@ -22,28 +31,27 @@
# Launchpad cron.daily (currently just for Ubuntu).
# Informational -- this *MUST* match the database.
-ARCHIVEROOT=/srv/launchpad.net/ubuntu-archive/ubuntu
+ARCHIVEROOT=$ARCHIVEPARENT/$DISTRONAME
DISTSROOT=$ARCHIVEROOT/dists
-OVERRIDEROOT=$ARCHIVEROOT/../ubuntu-overrides
-CACHEROOT=$ARCHIVEROOT/../ubuntu-cache
-DISTSCOPYROOT=$ARCHIVEROOT/../ubuntu-distscopy
+OVERRIDEROOT=$ARCHIVEROOT-overrides
+CACHEROOT=$ARCHIVEROOT-cache
+DISTSCOPYROOT=$ARCHIVEROOT-distscopy
INDICES=$ARCHIVEROOT/indices
PRODUCTION_CONFIG=ftpmaster-publish
if [ "$LPCONFIG" = "$PRODUCTION_CONFIG" ]; then
- ARCHIVEROOT_PARTNER=/srv/launchpad.net/ubuntu-archive/ubuntu-partner
- GNUPGHOME=/srv/launchpad.net/ubuntu-archive/gnupg-home
+ ARCHIVEROOT_PARTNER=$ARCHIVEROOT-partner
+ GNUPGHOME=$ARCHIVEROOT/gnupg-home
else
# GNUPGHOME does not need to be set, keys can come from ~/.gnupg.
- ARCHIVEROOT_PARTNER=/srv/launchpad.net/ppa/ubuntu-partner
+ ARCHIVEROOT_PARTNER=$ARCHIVEPARENT/ppa/$DISTRONAME-partner
fi
DISTSROOT_PARTNER=$ARCHIVEROOT_PARTNER/dists
-DISTSCOPYROOT_PARTNER=$ARCHIVEROOT_PARTNER/../ubuntu-partner-distscopy
+DISTSCOPYROOT_PARTNER=$ARCHIVEROOT-partner-distscopy
# Configuration options.
-LAUNCHPADROOT=/srv/launchpad.net/codelines/current
-LOCKFILE=/srv/launchpad.net/ubuntu-archive/cron.daily.lock
-DISTRONAME=ubuntu
+LAUNCHPADROOT=$ARCHIVEPARENT/codelines/current
+LOCKFILE=$ARCHIVEPARENT/cron.daily.lock
TRACEFILE=$ARCHIVEROOT/project/trace/$(hostname --fqdn)
DSYNCLIST=$CACHEROOT/dsync.list
MD5LIST=$INDICES/md5sums.gz
@@ -56,7 +64,7 @@
PATH=$PATH:$LAUNCHPADROOT/scripts:$LAUNCHPADROOT/cronscripts:$LAUNCHPADROOT/cronscripts/publishing:$LAUNCHPADROOT/scripts/ftpmaster-tools
# Claim the lock.
-if ! lockfile -r1 $LOCKFILE; then
+if ! ${LOCKFILE_CMD} -r1 $LOCKFILE; then
echo "Could not claim lock file."
exit 1
fi
@@ -153,30 +161,34 @@
publish-distro.py -v -v -d $DISTRONAME $SUITEOPTS -R ${DISTSROOT}.new
set +x
-# Find all the Release files for which the Release.GPG is missing/too-old
-# We use -maxdepth 2 to only sign Release files for distroreleases,
-# not distroarchreleases/distrosourcereleases.
-# Also we sign the dist-upgrader tarballs because they're handy too.
-for CANDIDATE in $(find ${DISTSROOT}.new -maxdepth 2 -name Release) \
- $(find ${DISTSROOT}.new/*/*/dist-upgrader* -name "*.tar.gz"); do
- # [ Release.gpg missing ] or [ Release is newer than Release.gpg ]
- if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
- echo "$(date -R): (re-)signing $CANDIDATE"
- gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
- else
- echo "$(date -R): Not re-signing $CANDIDATE"
- fi
-done
-SIGNLIST_PARTNER=$(find ${DISTSROOT_PARTNER}.new -maxdepth 2 -name Release)
-for CANDIDATE in $SIGNLIST_PARTNER; do
- # [ Release.gpg missing ] or [ Release is newer than Release.gpg ].
- if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
- echo "$(date -R): (re-)signing $CANDIDATE"
- gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
- else
- echo "$(date -R): Not re-signing $CANDIDATE"
- fi
-done
+if [ "${INHIBIT_GPG_SIGNING}" != yes ]; then
+ # Find all the Release files for which the Release.GPG is missing/too-old
+ # We use -maxdepth 2 to only sign Release files for distroreleases,
+ # not distroarchreleases/distrosourcereleases.
+ # Also we sign the dist-upgrader tarballs because they're handy too.
+ RELEASE_FILES=`find ${DISTSROOT}.new -maxdepth 2 -name Release`
+ DIST_UPGRADER_TARBALLS=`find ${DISTSROOT}.new/*/*/dist-upgrader* -name "*.tar.gz" || /bin/true`
+ for CANDIDATE in $RELEASE_FILES $DIST_UPGRADER_TARBALLS
+ do
+ # [ Release.gpg missing ] or [ Release is newer than Release.gpg ]
+ if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
+ echo "$(date -R): (re-)signing $CANDIDATE"
+ gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
+ else
+ echo "$(date -R): Not re-signing $CANDIDATE"
+ fi
+ done
+ SIGNLIST_PARTNER=$(find ${DISTSROOT_PARTNER}.new -maxdepth 2 -name Release)
+ for CANDIDATE in $SIGNLIST_PARTNER; do
+ # [ Release.gpg missing ] or [ Release is newer than Release.gpg ].
+ if [ ! -f $CANDIDATE.gpg ] || [ $CANDIDATE -nt $CANDIDATE.gpg ]; then
+ echo "$(date -R): (re-)signing $CANDIDATE"
+ gpg --yes --detach-sign --armor -o $CANDIDATE.gpg --sign $CANDIDATE
+ else
+ echo "$(date -R): Not re-signing $CANDIDATE"
+ fi
+ done
+fi
# The Packages and Sources files are very large and would cripple our
# mirrors, so we remove them now that the uncompressed MD5SUMS are in the
@@ -207,7 +219,7 @@
# dapper, edgy and feisty releases. Don't fail the whole script if it
# fails.
echo "$(date -R): Generating -commerical pocket..."
-commercial-compat.sh || true
+${COMMERCIAL_COMPAT_CMD} || true
# Timestamp our trace file to track when the last archive publisher run took
# place.
@@ -234,9 +246,9 @@
# Run dsync over primary archive only.
echo "$(date -R): Running dsync over primary archive..."
( cd $ARCHIVEROOT ; \
- dsync-flist -q generate $DSYNCLIST -e 'Packages*' -e 'Sources*' -e 'Release*' --md5 ; \
- (dsync-flist -q md5sums $DSYNCLIST; find dists '(' -name 'Packages*' -o -name 'Sources*' -o -name 'Release*' ')' -print | xargs -r md5sum) | gzip -9n > ${MD5LIST} ; \
- dsync-flist -q link-dups $DSYNCLIST || true )
+ $DSYNC_FLIST_CMD -q generate $DSYNCLIST -e 'Packages*' -e 'Sources*' -e 'Release*' --md5 ; \
+ ($DSYNC_FLIST_CMD -q md5sums $DSYNCLIST; find dists '(' -name 'Packages*' -o -name 'Sources*' -o -name 'Release*' ')' -print | xargs -r md5sum) | gzip -9n > ${MD5LIST} ; \
+ $DSYNC_FLIST_CMD -q link-dups $DSYNCLIST || true )
# Clear out empty and thus redundant dirs.
echo "$(date -R): Clearing out empty directories..."
=== added directory 'cronscripts/publishing/distro-parts'
=== added directory 'cronscripts/publishing/distro-parts/ubuntu'
=== added directory 'cronscripts/publishing/distro-parts/ubuntu/finalize.d'
=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate 2011-03-29 13:43:18 +0000
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+if [ "$SECURITY_UPLOAD_ONLY" != "yes" ]
+then
+ cron.germinate || /bin/true
+fi
=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file 2011-03-29 13:43:18 +0000
@@ -0,0 +1,5 @@
+#! /bin/sh
+#
+# Timestamp the trace file, so we can track when the last archive publisher
+# run took place.
+date -u > "$MAIN_ARCHIVEROOT"/project/trace/$(hostname --fqdn)
=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors 2011-03-29 13:43:18 +0000
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Prod the master mirrors to propagate the update.
+
+MASTERMIRRORS="syowa frei scandium"
+echo "$(date -R): Triggering master mirrors..."
+
+for HOST in $MASTERMIRRORS
+do
+ echo "$(date -R): Triggering $HOST:"
+ ssh archvsync@$HOST
+done
+
+echo "$(date -R): Master mirror triggers completed."
=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt 2011-03-29 13:43:18 +0000
@@ -0,0 +1,20 @@
+Put scripts here that should be run at completion of the publish-ftpmaster
+script. They will be executed through the run-parts command, in alphabetical
+order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens. All other files,
+including this text file, are ignored.
+
+Publication happens in two passes: the first, expedited pass processes only
+security updates. The second pass processes all packages. The scripts in
+this directory will be run once for each pass, with the variable
+SECURITY_UPLOAD_ONLY set to indicate which pass is in progress; see below.
+
+The following variables will be set for the script:
+
+ARCHIVEROOTS - the list of root directories for the distribution's archives.
+
+PRIMARY_ARCHIVEROOT - the root directory for the distribution's main archive.
+
+SECURITY_UPLOAD_ONLY - "yes" during the security pass, or "no" otherwise.
=== added directory 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d'
=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases 2011-03-29 13:43:18 +0000
@@ -0,0 +1,16 @@
+#!/bin/sh -e
+
+RELEASE_FILES=`find "$DISTSROOT".new -maxdepth 2 -name Release`
+DIST_UPGRADER_TARBALLS=`
+ find "$DISTSROOT".new"/*/*/dist-upgrader* -name "*.tar.gz" || true`
+
+for CANDIDATE in $RELEASE_FILES $DIST_UPGRADER_TARBALLS
+do
+ if [ ! -f "$CANDIDATE.gpg" ] || [ "$CANDIDATE" -nt "$CANDIDATE.gpg" ]
+ then
+ echo "$(date -R): (re-)signing $CANDIDATE"
+ gpg --yes --detach-sign --armor -o "$CANDIDATE.gpg" --sign "$CANDIDATE"
+ else
+ echo "$(date -R): Not re-signing $CANDIDATE"
+ fi
+done
=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings 2011-03-29 13:43:18 +0000
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+# Remove uncompressed Packages and Sources files.
+#
+# The uncompressed versions of these files are very large and could cause
+# problems for our mirrors, so get rid of them.
+#
+# It's safe to do this since the uncompressed MD5 hashes have already been
+# computed for inclusion in the Release files.
+
+find "$DISTSROOT".new \( -name -o -name Sources \) -exec rm -f -- "{}" \;
=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices 2011-03-29 13:43:18 +0000
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+
+echo "$(date -R): Copying the indices into place."
+
+INDICES="$ARCHIVEROOT/indices"
+
+rm -f -- "$INDICES/override"
+cp -- "$OVERRIDEROOT"/override.* "$INDICES/"
=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt 1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt 2011-03-29 13:43:18 +0000
@@ -0,0 +1,19 @@
+Put scripts here that should be run after publish-ftpmaster executes
+publish-distro. They will be executed through the run-parts command, in
+alphabetical order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens. All other files,
+including this text file, are ignored.
+
+Scripts in this directory will be run separately for each distro archive,
+possibly twice because publication happens in two passes: an expedited one for
+just the security uploads and then a second, slower pass for all packages.
+
+Some variables will be set before each script is run:
+
+ARCHIVEROOT - the archive's root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/ )
+
+DISTSROOT - the archive's dists root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/dists )
=== modified file 'lib/canonical/config/schema-lazr.conf'
--- lib/canonical/config/schema-lazr.conf 2011-03-23 09:22:20 +0000
+++ lib/canonical/config/schema-lazr.conf 2011-03-29 13:43:18 +0000
@@ -24,6 +24,22 @@
# datatype: string
dbuser: archivepublisher
+# Location where the run-parts directories for publish-ftpmaster
+# customization are to be found. Absolute path, or path relative to the
+# Launchpad source tree, or "none" to disable.
+#
+# Under this directory, publish-ftpmaster will look for directories
+# <distro>/publish-distro.d and <distro>/finalize.d. In production, use
+# "cronscripts/publishing/distro-parts".
+#
+# datatype: string
+run_parts_location: none
+
+# XXX JeroenVermeulen 2011-03-29 bug=741683: Retire this item when
+# Dapper support ends.
+# datatype: boolean
+run_commercial_compat: false
+
# XXX: wgrant 2011-03-22 bug=739992: These three keys are obsolete and
# should be removed after 11.04 is released.
root: none
=== modified file 'lib/lp/services/utils.py'
--- lib/lp/services/utils.py 2011-02-20 13:26:48 +0000
+++ lib/lp/services/utils.py 2011-03-29 13:43:18 +0000
@@ -15,6 +15,7 @@
'compress_hash',
'decorate_with',
'docstring_dedent',
+ 'file_exists',
'iter_split',
'run_capturing_output',
'synchronize',
@@ -24,6 +25,7 @@
]
from itertools import tee
+import os
from StringIO import StringIO
import string
import sys
@@ -47,6 +49,7 @@
"""
class AutoDecorateMetaClass(type):
+
def __new__(cls, class_name, bases, class_dict):
new_class_dict = {}
for name, value in class_dict.items():
@@ -207,11 +210,15 @@
def decorate_with(context_factory, *args, **kwargs):
"""Create a decorator that runs decorated functions with 'context'."""
+
def decorator(function):
+
def decorated(*a, **kw):
with context_factory(*args, **kwargs):
return function(*a, **kw)
+
return mergeFunctionMetadata(function, decorated)
+
return decorator
@@ -226,6 +233,11 @@
return (first + '\n' + dedent(rest)).strip()
+def file_exists(filename):
+ """Does `filename` exist?"""
+ return os.access(filename, os.F_OK)
+
+
class CapturedOutput(Fixture):
"""A fixture that captures output to stdout and stderr."""
=== modified file 'lib/lp/soyuz/scripts/processaccepted.py'
--- lib/lp/soyuz/scripts/processaccepted.py 2010-09-23 02:12:27 +0000
+++ lib/lp/soyuz/scripts/processaccepted.py 2011-03-29 13:43:18 +0000
@@ -30,7 +30,10 @@
from lp.bugs.interfaces.bugtask import BugTaskStatus
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.scripts.base import LaunchpadScript
+from lp.services.scripts.base import (
+ LaunchpadScript,
+ LaunchpadScriptFailure,
+ )
from lp.soyuz.enums import (
ArchivePurpose,
PackageUploadStatus,
@@ -241,6 +244,9 @@
try:
self.logger.debug("Finding distribution %s." % distro_name)
distribution = getUtility(IDistributionSet).getByName(distro_name)
+ if distribution is None:
+ raise LaunchpadScriptFailure(
+ "Distribution '%s' not found." % distro_name)
# target_archives is a tuple of (archive, description).
if self.options.ppa:
=== added file 'lib/lp/soyuz/scripts/publish_ftpmaster.py'
--- lib/lp/soyuz/scripts/publish_ftpmaster.py 1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/publish_ftpmaster.py 2011-03-29 13:43:18 +0000
@@ -0,0 +1,409 @@
+# Copyright 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master distro publishing script."""
+
+__metaclass__ = type
+__all__ = [
+ 'PublishFTPMaster',
+ ]
+
+from optparse import OptionParser
+import os
+from zope.component import getUtility
+
+from canonical.config import config
+from lp.archivepublisher.config import getPubConfig
+from lp.registry.interfaces.distribution import IDistributionSet
+from lp.services.scripts.base import (
+ LaunchpadCronScript,
+ LaunchpadScriptFailure,
+ )
+from lp.services.utils import file_exists
+from lp.soyuz.enums import ArchivePurpose
+from lp.soyuz.scripts import publishdistro
+from lp.soyuz.scripts.ftpmaster import LpQueryDistro
+from lp.soyuz.scripts.processaccepted import ProcessAccepted
+
+
+ARCHIVES_TO_PUBLISH = [
+ ArchivePurpose.PRIMARY,
+ ArchivePurpose.PARTNER,
+ ]
+
+
+ARCHIVE_SUFFIXES = {
+ ArchivePurpose.PRIMARY: "",
+ ArchivePurpose.PARTNER: "-partner",
+}
+
+
+def compose_shell_boolean(boolean_value):
+ """Represent a boolean value as "yes" or "no"."""
+ boolean_text = {
+ True: "yes",
+ False: "no",
+ }
+ return boolean_text[boolean_value]
+
+
+def compose_env_string(env):
+ """Turn a dict into a series of shell parameter assignments."""
+ return ' '.join(['='.join(pair) for pair in env.iteritems()])
+
+
+def get_distscopyroot(archive_config):
+ """Return the distscopy root directory for `archive_config`."""
+ return archive_config.archiveroot + "-distscopy"
+
+
+class StoreArgument:
+ """Helper class: receive argument and store it."""
+
+ def __call__(self, argument):
+ self.argument = argument
+
+
+def find_run_parts_dir(distro, parts):
+ """Find the requested run-parts directory, if it exists."""
+ run_parts_location = config.archivepublisher.run_parts_location
+ if not run_parts_location:
+ return
+
+ if run_parts_location.startswith("/"):
+ # Absolute path.
+ base_dir = run_parts_location
+ else:
+ # Relative path.
+ base_dir = os.path.join(config.root, run_parts_location)
+
+ parts_dir = os.path.join(base_dir, distro.name, parts)
+ if file_exists(parts_dir):
+ return parts_dir
+ else:
+ return None
+
+
+class PublishFTPMaster(LaunchpadCronScript):
+ """Publish a distro (update)."""
+
+ done_pub = False
+
+ def add_my_options(self):
+ self.parser.add_option(
+ '-d', '--distribution', dest='distribution', default=None,
+ help="Distribution to publish.")
+ self.parser.add_option(
+ '-s', '--security-only', dest='security_only',
+ action='store_true', default=False, help="Security upload only.")
+
+ def executeShell(self, command_line, failure=None):
+ """Run `command_line` through a shell.
+
+ This won't just load an external program and run it; the command
+ line goes through the full shell treatment including variable
+ substitutions, output redirections, and so on.
+
+ :param command_line: Shell command.
+ :param failure: Raise `failure` as an exception if the shell
+ command returns a nonzero value. If omitted, nonzero return
+ values are ignored.
+ """
+ self.logger.debug("Executing: %s" % command_line)
+ retval = os.system(command_line)
+ if retval != 0 and failure is not None:
+ self.logger.debug("Command failed: %s" % failure)
+ raise failure
+
+ def getArchives(self):
+ """Find archives for `self.distribution` that should be published."""
+ return [
+ archive
+ for archive in self.distribution.all_distro_archives
+ if archive.purpose in ARCHIVES_TO_PUBLISH]
+
+ def makeConfigs(self):
+ """Set up configuration objects for archives to be published.
+
+ The configs dict maps the archive purposes that are relevant for
+ publishing to the respective archives' configurations.
+ """
+ return dict(
+ (archive.purpose, getPubConfig(archive))
+ for archive in self.archives)
+
+ def cleanUp(self):
+ """Post-publishing cleanup."""
+ self.logger.debug("Cleaning up.")
+ for purpose, archive_config in self.configs.iteritems():
+ self.logger.debug(
+ "Moving %s dists backup to safe keeping for next time.",
+ purpose.title)
+ distscopyroot = archive_config.archiveroot + '-distscopy'
+ dists = os.path.join(distscopyroot, "dists")
+ if self.done_pub:
+ replacement_dists = archive_config.distsroot + ".old"
+ else:
+ replacement_dists = archive_config.distsroot + ".new"
+ if file_exists(replacement_dists):
+ self.logger.debug(
+ "Renaming %s to %s.", replacement_dists, dists)
+ os.rename(replacement_dists, dists)
+
+ def processAccepted(self):
+ """Run the process-accepted script."""
+ self.logger.debug(
+ "Processing the accepted queue into the publishing records...")
+ script = ProcessAccepted(test_args=[self.distribution.name])
+ script.txn = self.txn
+ script.logger = self.logger
+ script.main()
+
+ def getDirtySuites(self):
+ """Return list of suites that have packages pending publication."""
+ self.logger.debug("Querying which suites are pending publication...")
+ query_distro = LpQueryDistro(
+ test_args=['-d', self.distribution.name, "pending_suites"])
+ receiver = StoreArgument()
+ query_distro.runAction(presenter=receiver)
+ return receiver.argument.split()
+
+ def getDirtySecuritySuites(self):
+ """List security suites with pending publications."""
+ suites = self.getDirtySuites()
+ return [suite for suite in suites if suite.endswith('-security')]
+
+ def rsyncNewDists(self, archive_purpose):
+ """Populate dists.new with a copy of distsroot.
+
+ Uses "rsync -aH --delete" so that any obsolete files that may
+ still be in dists.new are cleaned up (bug 58835).
+
+ :param archive_purpose: The (purpose of the) archive to copy.
+ """
+ archive_config = self.configs[archive_purpose]
+ self.executeShell(
+ "rsync -aH --delete '%s/' '%s/dists.new'"
+ % (archive_config.distsroot, archive_config.archiveroot),
+ failure=LaunchpadScriptFailure(
+ "Failed to rsync dists.new for %s." % archive_purpose.title))
+
+ def setUpDirs(self):
+ """Copy the dists tree ready for publishing into.
+
+ We do this so that we don't get an inconsistent dists tree at
+ any point during the publishing cycle (which would cause buildds
+ to explode).
+
+ This is now done through maintaining a persistent backup copy of
+ the dists directory, which we move into place and bring up to
+ date with rsync. Should achieve the same effect as copying, but
+ faster.
+ """
+ for archive_config in self.configs.itervalues():
+ archiveroot = archive_config.archiveroot
+ if not file_exists(archiveroot):
+ self.logger.debug("Creating archive root %s.", archiveroot)
+ os.makedirs(archiveroot)
+ distsroot = archive_config.distsroot
+ if not file_exists(distsroot):
+ self.logger.debug("Creating dists root %s.", distsroot)
+ os.makedirs(distsroot)
+
+ for purpose, archive_config in self.configs.iteritems():
+ dists = os.path.join(get_distscopyroot(archive_config), "dists")
+ dists_new = os.path.join(archive_config.archiveroot, "dists.new")
+ if not file_exists(dists):
+ os.makedirs(dists)
+ os.rename(dists, dists_new)
+ self.rsyncNewDists(purpose)
+
+ def publishDistroArchive(self, archive, security_suites=None):
+ """Publish the results for an archive.
+
+ :param archive: Archive to publish.
+ :param security_suites: An optional list of suites to restrict
+ the publishing to.
+ """
+ purpose = archive.purpose
+ self.logger.debug(
+ "Publishing the %s %s...", self.distribution.name, purpose.title)
+ archive_config = self.configs[purpose]
+ arguments = [
+ '-v', '-v',
+ '-d', self.distribution.name,
+ '-R', archive_config.distsroot + '.new',
+ ]
+
+ if archive.purpose == ArchivePurpose.PARTNER:
+ arguments.append('--partner')
+
+ if security_suites is not None:
+ arguments += sum([['-s', suite] for suite in security_suites], [])
+
+ parser = OptionParser()
+ publishdistro.add_options(parser)
+ options, args = parser.parse_args(arguments)
+ publishdistro.run_publisher(options, txn=self.txn, log=self.logger)
+
+ self.runPublishDistroParts(archive)
+
+ def runPublishDistroParts(self, archive):
+ """Execute the publish-distro hooks."""
+ archive_config = self.configs[archive.purpose]
+ env = {
+ 'DISTSROOT': archive_config.distsroot,
+ 'ARCHIVEROOT': archive_config.archiveroot,
+ }
+ self.runParts('publish-distro.d', env)
+
+ def installDists(self):
+ """Put the new dists into place, as near-atomically as possible."""
+ self.logger.debug("Placing the new dists into place...")
+
+ for archive_config in self.configs.itervalues():
+ distsroot = archive_config.distsroot
+ os.rename(distsroot, distsroot + ".old")
+ os.rename(distsroot + ".new", distsroot)
+
+ self.done_pub = True
+
+ for archive_config in self.configs.itervalues():
+ dists = os.path.join(get_distscopyroot(archive_config), "dists")
+ os.rename(archive_config.distsroot + ".old", dists)
+
+ def runCommercialCompat(self):
+ """Generate the -commercial pocket.
+
+ This is done for backwards compatibility with dapper, edgy, and
+ feisty releases. Failure here is not fatal.
+ """
+ # XXX JeroenVermeulen 2011-03-24 bug=741683: Retire
+ # commercial-compat.sh (and this method) as soon as Dapper
+ # support ends.
+ if self.distribution.name != 'ubuntu':
+ return
+ if not config.archivepublisher.run_commercial_compat:
+ return
+
+ self.executeShell("""
+ env PATH="$PATH:%s/cronscripts/publishing" \
+ LPCONFIG="%s" \
+ commercial-compat.sh
+ """ % (config.root, config.instance_name))
+
+ def generateListings(self):
+ """Create ls-lR.gz listings."""
+ self.logger.debug("Creating ls-lR.gz...")
+ lslr = "ls-lR.gz"
+ lslr_new = "." + lslr + ".new"
+ for purpose, archive_config in self.configs.iteritems():
+ lslr_file = os.path.join(archive_config.archiveroot, lslr)
+ new_lslr_file = os.path.join(archive_config.archiveroot, lslr_new)
+ if file_exists(new_lslr_file):
+ os.remove(new_lslr_file)
+ self.executeShell(
+ "cd -- '%s' ; TZ=UTC ls -lR | gzip -9n >'%s'"
+ % (archive_config.archiveroot, lslr_new),
+ failure=LaunchpadScriptFailure(
+ "Failed to create %s for %s." % (lslr, purpose.title)))
+ os.rename(new_lslr_file, lslr_file)
+
+ def clearEmptyDirs(self):
+ """Clear out any redundant empty directories."""
+ for archive_config in self.configs.itervalues():
+ self.executeShell(
+ "find '%s' -type d -empty | xargs -r rmdir"
+ % archive_config.archiveroot)
+
+ def processOptions(self):
+ """Handle command-line options.
+
+ Sets `self.distribution` to the `Distribution` to publish.
+ """
+ if self.options.distribution is None:
+ raise LaunchpadScriptFailure("Specify a distribution.")
+
+ self.distribution = getUtility(IDistributionSet).getByName(
+ self.options.distribution)
+ if self.distribution is None:
+ raise LaunchpadScriptFailure(
+ "Distribution %s not found." % self.options.distribution)
+
+ def runParts(self, parts, env):
+ """Execute run-parts.
+
+ :param parts: The run-parts directory to execute:
+ "publish-distro.d" or "finalize.d".
+ :param env: A dict of environment variables to pass to the
+ scripts in the run-parts directory.
+ """
+ parts_dir = find_run_parts_dir(self.distribution, parts)
+ if parts_dir is None:
+ self.logger.debug("Skipping run-parts %s: not configured.", parts)
+ return
+ self.executeShell(
+ "%s run-parts -- '%s'" % (compose_env_string(env), parts_dir),
+ failure=LaunchpadScriptFailure(
+ "Failure while executing run-parts %s." % parts_dir))
+
+ def runFinalizeParts(self, security_only=False):
+ """Run the finalize.d parts to finalize publication."""
+ env = {
+ 'SECURITY_UPLOAD_ONLY': compose_shell_boolean(security_only),
+ 'PRIMARY_ARCHIVEROOT':
+ self.configs[ArchivePurpose.PRIMARY].archiveroot,
+ 'ARCHIVEROOTS':
+ ' '.join([
+ archive_config.archiveroot
+ for archive_config in self.configs.itervalues()]),
+ }
+ self.runParts('finalize.d', env)
+
+ def publishSecurityUploads(self):
+ """Quickly process just the pending security uploads."""
+ self.logger.debug("Expediting security uploads.")
+ security_suites = self.getDirtySecuritySuites()
+ if len(security_suites) == 0:
+ self.logger.info("Nothing to do for security publisher.")
+ return
+ partner_archive = self.distribution.getArchive("partner")
+ if partner_archive is not None:
+ self.publishDistroArchive(partner_archive)
+ self.publishDistroArchive(
+ self.distribution.main_archive, security_suites=security_suites)
+ self.installDists()
+ self.runCommercialCompat()
+ self.runFinalizeParts(security_only=True)
+
+ def publishAllUploads(self):
+ """Publish the distro's complete uploads."""
+ self.logger.debug("Full publication. This may take some time.")
+ for archive in self.archives:
+ # This, for the main archive, is where the script spends
+ # most of its time.
+ self.publishDistroArchive(archive)
+
+ self.installDists()
+ self.runCommercialCompat()
+ self.generateListings()
+ self.clearEmptyDirs()
+ self.runFinalizeParts()
+
+ def setUp(self):
+ """Process options, and set up internal state."""
+ self.processOptions()
+ self.archives = self.getArchives()
+ self.configs = self.makeConfigs()
+
+ def main(self):
+ """See `LaunchpadScript`."""
+ self.setUp()
+ try:
+ self.processAccepted()
+ self.setUpDirs()
+ self.publishSecurityUploads()
+ if not self.options.security_only:
+ self.publishAllUploads()
+ finally:
+ self.cleanUp()
=== added file 'lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py'
--- lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py 1970-01-01 00:00:00 +0000
+++ lib/lp/soyuz/scripts/tests/test_publish_ftpmaster.py 2011-03-29 13:43:18 +0000
@@ -0,0 +1,684 @@
+# Copyright 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test publish-ftpmaster cron script."""
+
+__metaclass__ = type
+
+import os
+from textwrap import dedent
+import transaction
+from zope.component import getUtility
+
+from canonical.config import config
+from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
+from canonical.testing.layers import (
+ LaunchpadZopelessLayer,
+ ZopelessDatabaseLayer,
+ )
+from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
+from lp.registry.interfaces.pocket import (
+ PackagePublishingPocket,
+ pocketsuffix,
+ )
+from lp.services.log.logger import DevNullLogger
+from lp.services.scripts.base import LaunchpadScriptFailure
+from lp.services.utils import file_exists
+from lp.soyuz.enums import (
+ ArchivePurpose,
+ PackagePublishingStatus,
+ )
+from lp.soyuz.scripts.publish_ftpmaster import (
+ compose_env_string,
+ compose_shell_boolean,
+ find_run_parts_dir,
+ PublishFTPMaster,
+ )
+from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
+from lp.testing import (
+ run_script,
+ TestCaseWithFactory,
+ )
+from lp.testing.fakemethod import FakeMethod
+
+
+def path_exists(*path_components):
+ """Does the given file or directory exist?"""
+ return file_exists(os.path.join(*path_components))
+
+
+def name_spph_suite(spph):
+ """Return name of `spph`'s suite."""
+ return spph.distroseries.name + pocketsuffix[spph.pocket]
+
+
+def get_pub_config(distro):
+ """Find the publishing config for `distro`."""
+ return getUtility(IPublisherConfigSet).getByDistribution(distro)
+
+
+def get_archive_root(pub_config):
+ """Return the archive root for the given publishing config."""
+ return os.path.join(pub_config.root_dir, pub_config.distribution.name)
+
+
+def get_dists_root(pub_config):
+ """Return the dists root directory for the given publishing config."""
+ return os.path.join(get_archive_root(pub_config), "dists")
+
+
+def get_distscopy_root(pub_config):
+ """Return the "distscopy" root for the given publishing config."""
+ return get_archive_root(pub_config) + "-distscopy"
+
+
+def get_run_parts_path():
+    """Get the relative path to the run-parts location in the Launchpad source."""
+ return os.path.join("cronscripts", "publishing", "distro-parts")
+
+
+class HelpersMixin:
+ """Helpers for the PublishFTPMaster tests."""
+
+ def enableRunParts(self, parts_directory=None):
+ """Set up for run-parts execution.
+
+ :param parts_directory: Base location for the run-parts
+ directories. If omitted, the run-parts directory from the
+ Launchpad source tree will be used.
+ """
+ if parts_directory is None:
+ parts_directory = get_run_parts_path()
+
+ config.push("run-parts", dedent("""\
+ [archivepublisher]
+ run_parts_location: %s
+ """ % parts_directory))
+
+ self.addCleanup(config.pop, "run-parts")
+
+
+class TestPublishFTPMasterHelpers(TestCaseWithFactory, HelpersMixin):
+ layer = ZopelessDatabaseLayer
+
+ def test_compose_env_string_iterates_env(self):
+ env = {
+ "A": "1",
+ "B": "2",
+ }
+ env_string = compose_env_string(env)
+ self.assertIn(env_string, ["A=1 B=2", "B=2 A=1"])
+
+ def test_compose_shell_boolean_shows_True_as_yes(self):
+ self.assertEqual("yes", compose_shell_boolean(True))
+
+ def test_compose_shell_boolean_shows_False_as_no(self):
+ self.assertEqual("no", compose_shell_boolean(False))
+
+ def test_find_run_parts_dir_finds_relative_runparts_directory(self):
+ self.enableRunParts()
+ ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+ self.assertEqual(
+ os.path.join(
+ config.root, get_run_parts_path(), "ubuntu", "finalize.d"),
+ find_run_parts_dir(ubuntu, "finalize.d"))
+
+ def test_find_run_parts_dir_finds_absolute_runparts_directory(self):
+ self.enableRunParts(os.path.join(config.root, get_run_parts_path()))
+ ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+ self.assertEqual(
+ os.path.join(
+ config.root, get_run_parts_path(), "ubuntu", "finalize.d"),
+ find_run_parts_dir(ubuntu, "finalize.d"))
+
+ def test_find_run_parts_dir_ignores_blank_config(self):
+ self.enableRunParts("")
+ ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+ self.assertIs(None, find_run_parts_dir(ubuntu, "finalize.d"))
+
+ def test_find_run_parts_dir_ignores_none_config(self):
+ self.enableRunParts("none")
+ ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
+ self.assertIs(None, find_run_parts_dir(ubuntu, "finalize.d"))
+
+ def test_find_run_parts_dir_ignores_nonexistent_directory(self):
+ self.enableRunParts()
+ distro = self.factory.makeDistribution()
+ self.assertIs(None, find_run_parts_dir(distro, "finalize.d"))
+
+
+class TestPublishFTPMasterScript(TestCaseWithFactory, HelpersMixin):
+ layer = LaunchpadZopelessLayer
+
+    # Location of the publish-ftpmaster script under test.
+ SCRIPT_PATH = "cronscripts/publish-ftpmaster.py"
+
+ def setUpForScriptRun(self, distro):
+ """Mock up config to run the script on `distro`."""
+ pub_config = getUtility(IPublisherConfigSet).getByDistribution(distro)
+ pub_config.root_dir = unicode(
+ self.makeTemporaryDirectory())
+
+ def getDistro(self, use_ubuntu=False):
+ """Obtain a `Distribution` for testing, and set up test directory.
+
+ :param use_ubuntu: Use Ubuntu as the test distro? If not,
+ create a new one.
+ """
+ if use_ubuntu:
+ distro = getUtility(ILaunchpadCelebrities).ubuntu
+ else:
+ distro = self.factory.makeDistribution()
+ self.setUpForScriptRun(distro)
+ return distro
+
+ def makeScript(self, distro=None):
+ """Produce instance of the `PublishFTPMaster` script."""
+ if distro is None:
+ distro = self.getDistro()
+ script = PublishFTPMaster(test_args=["-d", distro.name])
+ script.txn = transaction
+ script.logger = DevNullLogger()
+ return script
+
+ def readReleaseFile(self, filename):
+ """Read a Release file, return as a keyword/value dict."""
+ lines = []
+ for line in file(filename):
+ if line.startswith(' '):
+ lines[-1] += line
+ else:
+ lines.append(line)
+ return dict(
+ (key, value.strip())
+ for key, value in [line.split(':', 1) for line in lines])
+
+ def writeMarkerFile(self, path, contents):
+        """Write a marker file for checking directory movements.
+
+ :param path: A list of path components.
+ :param contents: Text to write into the file.
+ """
+ marker = file(os.path.join(*path), "w")
+ marker.write(contents)
+ marker.flush()
+ marker.close()
+
+ def readMarkerFile(self, path):
+ """Read the contents of a marker file.
+
+        :return: Contents of the marker file.
+ """
+ return file(os.path.join(*path)).read()
+
+ def enableCommercialCompat(self):
+ """Enable commercial-compat.sh runs for the duration of the test."""
+ config.push("commercial-compat", dedent("""\
+ [archivepublisher]
+ run_commercial_compat: true
+ """))
+ self.addCleanup(config.pop, "commercial-compat")
+
+ def test_script_runs_successfully(self):
+ ubuntu = self.getDistro(use_ubuntu=True)
+ transaction.commit()
+ stdout, stderr, retval = run_script(
+ self.SCRIPT_PATH + " -d ubuntu")
+ self.assertEqual(0, retval, "Script failure:\n" + stderr)
+
+ def test_script_is_happy_with_no_publications(self):
+ distro = self.getDistro()
+ self.makeScript(distro).main()
+
+ def test_produces_listings(self):
+ distro = self.getDistro()
+ self.makeScript(distro).main()
+ self.assertTrue(
+ path_exists(get_archive_root(get_pub_config(distro)), 'ls-lR.gz'))
+
+ def test_publishes_package(self):
+ test_publisher = SoyuzTestPublisher()
+ distroseries = test_publisher.setUpDefaultDistroSeries()
+ distro = distroseries.distribution
+ pub_config = get_pub_config(distro)
+ self.factory.makeComponentSelection(
+ distroseries=distroseries, component="main")
+ self.factory.makeArchive(
+ distribution=distro, purpose=ArchivePurpose.PARTNER)
+ test_publisher.getPubSource()
+
+ self.setUpForScriptRun(distro)
+ self.makeScript(distro).main()
+
+ archive_root = get_archive_root(pub_config)
+ dists_root = get_dists_root(pub_config)
+
+ dsc = os.path.join(
+ archive_root, 'pool', 'main', 'f', 'foo', 'foo_666.dsc')
+ self.assertEqual("I do not care about sources.", file(dsc).read())
+ overrides = os.path.join(
+ archive_root + '-overrides', distroseries.name + '_main_source')
+ self.assertEqual(dsc, file(overrides).read().rstrip())
+ self.assertTrue(path_exists(
+ dists_root, distroseries.name, 'main', 'source', 'Sources.gz'))
+ self.assertTrue(path_exists(
+ dists_root, distroseries.name, 'main', 'source', 'Sources.bz2'))
+
+ distcopyseries = os.path.join(dists_root, distroseries.name)
+ release = self.readReleaseFile(
+ os.path.join(distcopyseries, "Release"))
+ self.assertEqual(distro.displayname, release['Origin'])
+ self.assertEqual(distro.displayname, release['Label'])
+ self.assertEqual(distroseries.name, release['Suite'])
+ self.assertEqual(distroseries.name, release['Codename'])
+ self.assertEqual("main", release['Components'])
+ self.assertEqual("", release["Architectures"])
+ self.assertIn("Date", release)
+ self.assertIn("Description", release)
+ self.assertNotEqual("", release["MD5Sum"])
+ self.assertNotEqual("", release["SHA1"])
+ self.assertNotEqual("", release["SHA256"])
+
+ main_release = self.readReleaseFile(
+ os.path.join(distcopyseries, 'main', 'source', "Release"))
+ self.assertEqual(distroseries.name, main_release["Archive"])
+ self.assertEqual("main", main_release["Component"])
+ self.assertEqual(distro.displayname, main_release["Origin"])
+ self.assertEqual(distro.displayname, main_release["Label"])
+ self.assertEqual("source", main_release["Architecture"])
+
+ def test_cleanup_moves_dists_to_new_if_not_published(self):
+ distro = self.getDistro()
+ pub_config = get_pub_config(distro)
+ dists_root = get_dists_root(pub_config)
+ dists_copy_root = get_distscopy_root(pub_config)
+ new_distsroot = dists_root + ".new"
+ os.makedirs(new_distsroot)
+ self.writeMarkerFile([new_distsroot, "marker"], "dists.new")
+ os.makedirs(dists_copy_root)
+
+ script = self.makeScript(distro)
+ script.setUp()
+ script.cleanUp()
+ self.assertEqual(
+ "dists.new",
+ self.readMarkerFile([dists_copy_root, "dists", "marker"]))
+
+ def test_cleanup_moves_dists_to_old_if_published(self):
+ distro = self.getDistro()
+ pub_config = get_pub_config(distro)
+ dists_root = get_dists_root(pub_config)
+ old_distsroot = dists_root + ".old"
+ dists_copy_root = get_distscopy_root(pub_config)
+ os.makedirs(old_distsroot)
+ self.writeMarkerFile([old_distsroot, "marker"], "dists.old")
+ os.makedirs(dists_copy_root)
+
+ script = self.makeScript(distro)
+ script.setUp()
+ script.done_pub = True
+ script.cleanUp()
+ self.assertEqual(
+ "dists.old",
+ self.readMarkerFile([dists_copy_root, "dists", "marker"]))
+
+ def test_getDirtySuites_returns_suite_with_pending_publication(self):
+ spph = self.factory.makeSourcePackagePublishingHistory()
+ script = self.makeScript(spph.distroseries.distribution)
+ script.setUp()
+ self.assertEqual([name_spph_suite(spph)], script.getDirtySuites())
+
+ def test_getDirtySuites_returns_suites_with_pending_publications(self):
+ distro = self.getDistro()
+ spphs = [
+ self.factory.makeSourcePackagePublishingHistory(
+ distroseries=self.factory.makeDistroSeries(
+ distribution=distro))
+ for counter in xrange(2)]
+
+ script = self.makeScript(distro)
+ script.setUp()
+ self.assertContentEqual(
+ [name_spph_suite(spph) for spph in spphs],
+ script.getDirtySuites())
+
+ def test_getDirtySuites_ignores_suites_without_pending_publications(self):
+ spph = self.factory.makeSourcePackagePublishingHistory(
+ status=PackagePublishingStatus.PUBLISHED)
+ script = self.makeScript(spph.distroseries.distribution)
+ script.setUp()
+ self.assertEqual([], script.getDirtySuites())
+
+ def test_getDirtySecuritySuites_returns_security_suites(self):
+ distro = self.getDistro()
+ spphs = [
+ self.factory.makeSourcePackagePublishingHistory(
+ distroseries=self.factory.makeDistroSeries(
+ distribution=distro),
+ pocket=PackagePublishingPocket.SECURITY)
+ for counter in xrange(2)]
+
+ script = self.makeScript(distro)
+ script.setUp()
+ self.assertContentEqual(
+ [name_spph_suite(spph) for spph in spphs],
+ script.getDirtySecuritySuites())
+
+ def test_getDirtySecuritySuites_ignores_non_security_suites(self):
+ distroseries = self.factory.makeDistroSeries()
+ spphs = [
+ self.factory.makeSourcePackagePublishingHistory(
+ distroseries=distroseries, pocket=pocket)
+ for pocket in [
+ PackagePublishingPocket.RELEASE,
+ PackagePublishingPocket.UPDATES,
+ PackagePublishingPocket.PROPOSED,
+ PackagePublishingPocket.BACKPORTS,
+ ]]
+ script = self.makeScript(distroseries.distribution)
+ script.setUp()
+ self.assertEqual([], script.getDirtySecuritySuites())
+
+ def test_rsync_copies_files(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ dists_root = get_dists_root(get_pub_config(distro))
+ os.makedirs(dists_root)
+ os.makedirs(dists_root + ".new")
+ self.writeMarkerFile([dists_root, "new-file"], "New file")
+ script.rsyncNewDists(ArchivePurpose.PRIMARY)
+ self.assertEqual(
+ "New file",
+ self.readMarkerFile([dists_root + ".new", "new-file"]))
+
+ def test_rsync_cleans_up_obsolete_files(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ dists_root = get_dists_root(get_pub_config(distro))
+ os.makedirs(dists_root)
+ os.makedirs(dists_root + ".new")
+ old_file = [dists_root + ".new", "old-file"]
+ self.writeMarkerFile(old_file, "old-file")
+ script.rsyncNewDists(ArchivePurpose.PRIMARY)
+ self.assertFalse(path_exists(*old_file))
+
+ def test_setUpDirs_creates_directory_structure(self):
+ distro = self.getDistro()
+ pub_config = get_pub_config(distro)
+ archive_root = get_archive_root(pub_config)
+ dists_root = get_dists_root(pub_config)
+ script = self.makeScript(distro)
+ script.setUp()
+
+ self.assertFalse(file_exists(archive_root))
+
+ script.setUpDirs()
+
+ self.assertTrue(file_exists(archive_root))
+ self.assertTrue(file_exists(dists_root))
+ self.assertTrue(file_exists(dists_root + ".new"))
+
+ def test_setUpDirs_does_not_mind_if_directories_already_exist(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.setUpDirs()
+ self.assertTrue(file_exists(get_archive_root(get_pub_config(distro))))
+
+ def test_setUpDirs_moves_dists_to_dists_new(self):
+ distro = self.getDistro()
+ dists_root = get_dists_root(get_pub_config(distro))
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ self.writeMarkerFile([dists_root, "marker"], "X")
+ script.setUpDirs()
+ self.assertEqual(
+ "X", self.readMarkerFile([dists_root + ".new", "marker"]))
+
+ def test_publishDistroArchive_runs_parts(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.runParts = FakeMethod()
+ script.publishDistroArchive(distro.main_archive)
+ self.assertEqual(1, script.runParts.call_count)
+ args, kwargs = script.runParts.calls[0]
+ parts_dir, env = args
+ self.assertEqual("publish-distro.d", parts_dir)
+
+ def test_runPublishDistroParts_passes_parameters(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.runParts = FakeMethod()
+ script.runPublishDistroParts(distro.main_archive)
+ args, kwargs = script.runParts.calls[0]
+ parts_dir, env = args
+ required_parameters = set(["DISTSROOT", "ARCHIVEROOT"])
+ missing_parameters = set(env.keys()).difference(required_parameters)
+ self.assertEqual(set(), missing_parameters)
+
+ def test_installDists_sets_done_pub(self):
+ script = self.makeScript()
+ script.setUp()
+ script.setUpDirs()
+ self.assertFalse(script.done_pub)
+ script.installDists()
+ self.assertTrue(script.done_pub)
+
+ def test_installDists_replaces_distsroot(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ pub_config = get_pub_config(distro)
+ dists_root = get_dists_root(pub_config)
+
+ self.writeMarkerFile([dists_root, "marker"], "old")
+ self.writeMarkerFile([dists_root + ".new", "marker"], "new")
+
+ script.installDists()
+
+ self.assertEqual("new", self.readMarkerFile([dists_root, "marker"]))
+ self.assertEqual( "old", self.readMarkerFile(
+ [get_distscopy_root(pub_config), "dists", "marker"]))
+
+ def test_runCommercialCompat_runs_commercial_compat_script(self):
+ # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
+ # runCommercialCompat as soon as Dapper support ends.
+ self.enableCommercialCompat()
+ script = self.makeScript(self.getDistro(use_ubuntu=True))
+ script.setUp()
+ script.executeShell = FakeMethod()
+ script.runCommercialCompat()
+ self.assertEqual(1, script.executeShell.call_count)
+ args, kwargs = script.executeShell.calls[0]
+ command_line, = args
+ self.assertIn("commercial-compat.sh", command_line)
+
+ def test_runCommercialCompat_runs_only_for_ubuntu(self):
+ # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
+ # runCommercialCompat as soon as Dapper support ends.
+ self.enableCommercialCompat()
+ script = self.makeScript(self.getDistro(use_ubuntu=False))
+ script.setUp()
+ script.executeShell = FakeMethod()
+ script.runCommercialCompat()
+ self.assertEqual(0, script.executeShell.call_count)
+
+ def test_runCommercialCompat_runs_only_if_configured(self):
+ # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
+ # runCommercialCompat as soon as Dapper support ends.
+ script = self.makeScript(self.getDistro(use_ubuntu=True))
+ script.setUp()
+ script.executeShell = FakeMethod()
+ script.runCommercialCompat()
+ self.assertEqual(0, script.executeShell.call_count)
+
+ def test_generateListings_writes_ls_lR_gz(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.generateListings()
+ pass
+
+ def test_clearEmptyDirs_cleans_up_empty_directories(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ empty_dir = os.path.join(
+ get_dists_root(get_pub_config(distro)), 'empty-dir')
+ os.makedirs(empty_dir)
+ script.clearEmptyDirs()
+ self.assertFalse(file_exists(empty_dir))
+
+ def test_clearEmptyDirs_does_not_clean_up_nonempty_directories(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ nonempty_dir = os.path.join(
+ get_dists_root(get_pub_config(distro)), 'nonempty-dir')
+ os.makedirs(nonempty_dir)
+ self.writeMarkerFile([nonempty_dir, "placeholder"], "Data here!")
+ script.clearEmptyDirs()
+ self.assertTrue(file_exists(nonempty_dir))
+
+ def test_processOptions_finds_distribution(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.processOptions()
+ self.assertEqual(distro.name, script.options.distribution)
+ self.assertEqual(distro, script.distribution)
+
+ def test_processOptions_complains_about_unknown_distribution(self):
+ script = self.makeScript()
+ script.options.distribution = self.factory.getUniqueString()
+ self.assertRaises(LaunchpadScriptFailure, script.processOptions)
+
+ def test_runParts_runs_parts(self):
+ self.enableRunParts()
+ script = self.makeScript(self.getDistro(use_ubuntu=True))
+ script.setUp()
+ script.executeShell = FakeMethod()
+ script.runParts("finalize.d", {})
+ self.assertEqual(1, script.executeShell.call_count)
+ args, kwargs = script.executeShell.calls[-1]
+ command_line, = args
+ self.assertIn("run-parts", command_line)
+ self.assertIn(
+ "cronscripts/publishing/distro-parts/ubuntu/finalize.d",
+ command_line)
+
+ def test_runParts_passes_parameters(self):
+ self.enableRunParts()
+ script = self.makeScript(self.getDistro(use_ubuntu=True))
+ script.setUp()
+ script.executeShell = FakeMethod()
+ key = self.factory.getUniqueString()
+ value = self.factory.getUniqueString()
+ script.runParts("finalize.d", {key: value})
+ args, kwargs = script.executeShell.calls[-1]
+ command_line, = args
+ self.assertIn("%s=%s" % (key, value), command_line)
+
+ def test_executeShell_executes_shell_command(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ marker = os.path.join(
+ get_pub_config(distro).root_dir, "marker")
+ script.executeShell("touch %s" % marker)
+ self.assertTrue(file_exists(marker))
+
+ def test_executeShell_reports_failure_if_requested(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+
+ class ArbitraryFailure(Exception):
+ """Some exception that's not likely to come from elsewhere."""
+
+ self.assertRaises(
+ ArbitraryFailure,
+ script.executeShell, "/bin/false", failure=ArbitraryFailure())
+
+ def test_executeShell_does_not_report_failure_if_not_requested(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ # The test is that this does not fail:
+ script.executeShell("/bin/false")
+
+ def test_runFinalizeParts_passes_parameters(self):
+ script = self.makeScript(self.getDistro(use_ubuntu=True))
+ script.setUp()
+ script.runParts = FakeMethod()
+ script.runFinalizeParts()
+ args, kwargs = script.runParts.calls[0]
+ parts_dir, env = args
+ required_parameters = set([
+ "ARCHIVEROOTS",
+ "PRIMARY_ARCHIVEROOT",
+ "SECURITY_UPLOAD_ONLY",
+ ])
+ missing_parameters = set(env.keys()).difference(required_parameters)
+ self.assertEqual(set(), missing_parameters)
+
+ def test_publishSecurityUploads_skips_pub_if_no_security_updates(self):
+ script = self.makeScript(self.getDistro())
+ script.setUp()
+ script.setUpDirs()
+ script.installDists = FakeMethod()
+ script.publishSecurityUploads()
+ self.assertEqual(0, script.installDists.call_count)
+
+ def test_publishSecurityUploads_runs_finalize_parts(self):
+ distro = self.getDistro()
+ self.factory.makeSourcePackagePublishingHistory(
+ distroseries=self.factory.makeDistroSeries(distribution=distro),
+ pocket=PackagePublishingPocket.SECURITY)
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.runFinalizeParts = FakeMethod()
+ script.publishSecurityUploads()
+ self.assertEqual(1, script.runFinalizeParts.call_count)
+ args, kwargs = script.runFinalizeParts.calls[0]
+ self.assertTrue(kwargs["security_only"])
+
+ def test_publishAllUploads_publishes_all_distro_archives(self):
+ distro = self.getDistro()
+ distroseries = self.factory.makeDistroSeries(distribution=distro)
+ partner_archive = self.factory.makeArchive(
+ distribution=distro, purpose=ArchivePurpose.PARTNER)
+ for archive in distro.all_distro_archives:
+ self.factory.makeSourcePackagePublishingHistory(
+ distroseries=distroseries,
+ archive=archive)
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.publishDistroArchive = FakeMethod()
+ script.publishAllUploads()
+ published_archives = [
+ args[0] for args, kwargs in script.publishDistroArchive.calls]
+
+ self.assertContentEqual(
+ distro.all_distro_archives, published_archives)
+ self.assertIn(distro.main_archive, published_archives)
+ self.assertIn(partner_archive, published_archives)
+
+ def test_publishAllUploads_runs_finalize_parts(self):
+ distro = self.getDistro()
+ script = self.makeScript(distro)
+ script.setUp()
+ script.setUpDirs()
+ script.runFinalizeParts = FakeMethod()
+ script.publishAllUploads()
+ self.assertEqual(1, script.runFinalizeParts.call_count)
=== modified file 'lib/lp/testing/__init__.py'
--- lib/lp/testing/__init__.py 2011-03-29 00:11:57 +0000
+++ lib/lp/testing/__init__.py 2011-03-29 13:43:18 +0000
@@ -1061,17 +1061,19 @@
now += delta
-def run_script(cmd_line):
+def run_script(cmd_line, env=None):
"""Run the given command line as a subprocess.
- Return a 3-tuple containing stdout, stderr and the process' return code.
-
- The environment given to the subprocess is the same as the one in the
- parent process except for the PYTHONPATH, which is removed so that the
- script, passed as the `cmd_line` parameter, will fail if it doesn't set it
- up properly.
+ :param cmd_line: A command line suitable for passing to
+ `subprocess.Popen`.
+ :param env: An optional environment dict. If none is given, the
+ script will get a copy of your present environment. Either way,
+ PYTHONPATH will be removed from it because it will break the
+ script.
+ :return: A 3-tuple of stdout, stderr, and the process' return code.
"""
- env = os.environ.copy()
+ if env is None:
+ env = os.environ.copy()
env.pop('PYTHONPATH', None)
process = subprocess.Popen(
cmd_line, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py 2011-03-29 01:28:52 +0000
+++ lib/lp/testing/factory.py 2011-03-29 13:43:18 +0000
@@ -256,11 +256,15 @@
)
from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
-from lp.soyuz.interfaces.component import IComponentSet
+from lp.soyuz.interfaces.component import (
+ IComponent,
+ IComponentSet,
+ )
from lp.soyuz.interfaces.packageset import IPackagesetSet
from lp.soyuz.interfaces.processor import IProcessorFamilySet
from lp.soyuz.interfaces.publishing import IPublishingSet
from lp.soyuz.interfaces.section import ISectionSet
+from lp.soyuz.model.component import ComponentSelection
from lp.soyuz.model.files import (
BinaryPackageFile,
SourcePackageReleaseFile,
@@ -2405,6 +2409,23 @@
name = self.getUniqueString()
return getUtility(IComponentSet).ensure(name)
+ def makeComponentSelection(self, distroseries=None, component=None):
+ """Make a new `ComponentSelection`.
+
+ :param distroseries: Optional `DistroSeries`. If none is given,
+ one will be created.
+ :param component: Optional `Component` or a component name. If
+ none is given, one will be created.
+ """
+ if distroseries is None:
+ distroseries = self.makeDistroSeries()
+
+ if not IComponent.providedBy(component):
+ component = self.makeComponent(component)
+
+ return ComponentSelection(
+ distroseries=distroseries, component=component)
+
def makeArchive(self, distribution=None, owner=None, name=None,
purpose=None, enabled=True, private=False,
virtualized=True, description=None, displayname=None):
=== modified file 'lib/lp/testing/tests/test_factory.py'
--- lib/lp/testing/tests/test_factory.py 2010-12-02 16:13:51 +0000
+++ lib/lp/testing/tests/test_factory.py 2011-03-29 13:43:18 +0000
@@ -52,6 +52,7 @@
)
from lp.soyuz.interfaces.queue import IPackageUpload
from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease
+from lp.soyuz.model.component import ComponentSelection
from lp.testing import TestCaseWithFactory
from lp.testing.factory import is_security_proxied_or_harmless
from lp.testing.matchers import (
@@ -399,6 +400,28 @@
distroseries = self.factory.makeDistroSeries()
self.assertThat(distroseries.displayname, StartsWith("Distroseries"))
+ # makeComponentSelection
+ def test_makeComponentSelection_makes_ComponentSelection(self):
+ selection = self.factory.makeComponentSelection()
+ self.assertIsInstance(selection, ComponentSelection)
+
+ def test_makeComponentSelection_uses_distroseries(self):
+ distroseries = self.factory.makeDistroSeries()
+ selection = self.factory.makeComponentSelection(
+ distroseries=distroseries)
+ self.assertEqual(distroseries, selection.distroseries)
+
+ def test_makeComponentSelection_uses_component(self):
+ component = self.factory.makeComponent()
+ selection = self.factory.makeComponentSelection(component=component)
+ self.assertEqual(component, selection.component)
+
+ def test_makeComponentSelection_finds_component(self):
+ component = self.factory.makeComponent()
+ selection = self.factory.makeComponentSelection(
+ component=component.name)
+ self.assertEqual(component, selection.component)
+
# makeLanguage
def test_makeLanguage(self):
# Without parameters, makeLanguage creates a language with code
Follow ups