← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~benji/launchpad/add-edit-tests into lp:launchpad

 

Benji York has proposed merging lp:~benji/launchpad/add-edit-tests into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~benji/launchpad/add-edit-tests/+merge/56786

This branch adds workflow tests for adding bug notification
subscriptions, editing them, and unsubscribing from them.  The intent is
to test the basic workflow in each scenario as well as provide a good
framework for more tests in the future.

The tests can be run by loading
lib/lp/registry/javascript/tests/test_structural_subscription.html in a
browser.

There is no lint reported for the added/changed lines but there is
quite a bit for the pre-existing lines.  Fixes for those are in a
forthcoming branch in order to keep this branch sane.

-- 
https://code.launchpad.net/~benji/launchpad/add-edit-tests/+merge/56786
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~benji/launchpad/add-edit-tests into lp:launchpad.
=== modified file 'configs/development/launchpad-lazr.conf'
--- configs/development/launchpad-lazr.conf	2011-03-23 18:29:09 +0000
+++ configs/development/launchpad-lazr.conf	2011-04-07 15:19:36 +0000
@@ -5,6 +5,13 @@
 [meta]
 extends: ../../lib/canonical/config/schema-lazr.conf
 
+[archivepublisher]
+run_parts_location: none
+
+# XXX JeroenVermeulen 2011-03-29 bug=741683: Retire this item when
+# Dapper support ends.
+run_commercial_compat: false
+
 [branchscanner]
 oops_prefix: BS
 error_dir: /var/tmp/codehosting.test

=== added file 'cronscripts/publish-ftpmaster.py'
--- cronscripts/publish-ftpmaster.py	1970-01-01 00:00:00 +0000
+++ cronscripts/publish-ftpmaster.py	2011-04-07 15:19:36 +0000
@@ -0,0 +1,17 @@
+#!/usr/bin/python -S
+#
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master distro publishing script."""
+
+import _pythonpath
+
+from canonical.config import config
+from lp.archivepublisher.scripts.publish_ftpmaster import PublishFTPMaster
+
+
+if __name__ == '__main__':
+    script = PublishFTPMaster(
+        "publish-ftpmaster", dbuser=config.archivepublisher.dbuser)
+    script.lock_and_run()

=== added directory 'cronscripts/publishing/distro-parts'
=== added directory 'cronscripts/publishing/distro-parts/ubuntu'
=== added directory 'cronscripts/publishing/distro-parts/ubuntu/finalize.d'
=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/10-germinate	2011-04-07 15:19:36 +0000
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+if [ "$SECURITY_UPLOAD_ONLY" != "yes" ]
+then
+    cron.germinate || /bin/true
+fi

=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/40-timestamp-trace-file	2011-04-07 15:19:36 +0000
@@ -0,0 +1,9 @@
+#! /bin/sh
+#
+# Timestamp the trace file, so we can track when the last archive publisher
+# run took place.
+for DIR in $ARCHIVEROOTS
+do
+    mkdir -p -- "$DIR/project/trace"
+    date -u > "$DIR/project/trace/$(hostname --fqdn)"
+done

=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/90-trigger-mirrors	2011-04-07 15:19:36 +0000
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Prod the master mirrors to propagate the update.
+
+MASTERMIRRORS="syowa frei wahoo scandium"
+echo "$(date -R): Triggering master mirrors..."
+
+for HOST in $MASTERMIRRORS
+do
+    echo "$(date -R): Triggering $HOST:"
+    ssh archvsync@$HOST
+done
+
+echo "$(date -R): Master mirror triggers completed."

=== added file 'cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt'
--- cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/finalize.d/README.txt	2011-04-07 15:19:36 +0000
@@ -0,0 +1,22 @@
+Put scripts here that should be run at completion of the publish-ftpmaster
+script.  They will be executed through the run-parts command, in alphabetical
+order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens.  All other files,
+including this text file, are ignored.
+
+Publication happens in two passes: the first, expedited pass processes only
+security updates.  The second pass processes all packages.  The scripts in
+this directory will be run once for each pass, with the variable
+SECURITY_UPLOAD_ONLY set to indicate which pass is in progress; see below.
+
+The following variables will be set for the script:
+
+ARCHIVEROOTS - the list of root directories for the distribution's archives.
+(e.g. "/srv/ubuntu-archive/ubuntu/ /srv/ubuntu-archive/ubuntu-partner/" )
+
+SECURITY_UPLOAD_ONLY - "yes" during the security pass, or "no" otherwise.
+
+The script's PATH will be extended with the Launchpad source tree's
+cronscripts/publishing directory.

=== added directory 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d'
=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/10-sign-releases	2011-04-07 15:19:36 +0000
@@ -0,0 +1,17 @@
+#!/bin/sh -e
+
+RELEASE_FILES=`find "$DISTSROOT".new -maxdepth 2 -name Release`
+DIST_UPGRADER_TARBALLS=`
+	find "$DISTSROOT".new/*/*/dist-upgrader* -name "*.tar.gz" || true`
+
+for CANDIDATE in $RELEASE_FILES $DIST_UPGRADER_TARBALLS
+do
+    if [ ! -f "$CANDIDATE.gpg" ] || [ "$CANDIDATE" -nt "$CANDIDATE.gpg" ]
+    then
+        echo "$(date -R): (re-)signing $CANDIDATE"
+        gpg --yes --detach-sign --armor -o "$CANDIDATE.gpg" \
+            --sign "$CANDIDATE"
+    else
+        echo "$(date -R): Not re-signing $CANDIDATE"
+    fi
+done

=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/20-remove-uncompressed-listings	2011-04-07 15:19:36 +0000
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+# Remove uncompressed Packages and Sources files.
+#
+# The uncompressed versions of these files are very large and could cause
+# problems for our mirrors, so get rid of them.
+#
+# It's safe to do this since the uncompressed MD5 hashes have already been
+# computed for inclusion in the Release files.
+
+find "$DISTSROOT".new \( -name Packages -o -name Sources \) -exec rm -f -- "{}" \;

=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/30-copy-indices	2011-04-07 15:19:36 +0000
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+
+echo "$(date -R): Copying the indices into place."
+
+INDICES="$ARCHIVEROOT/indices"
+
+rm -f -- "$INDICES/override"
+cp -- "$OVERRIDEROOT"/override.* "$INDICES/"

=== added file 'cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt'
--- cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt	1970-01-01 00:00:00 +0000
+++ cronscripts/publishing/distro-parts/ubuntu/publish-distro.d/README.txt	2011-04-07 15:19:36 +0000
@@ -0,0 +1,22 @@
+Put scripts here that should be run after publish-ftpmaster executes
+publish-distro.  They will be executed through the run-parts command, in
+alphabetical order.
+
+The scripts' filenames must consist entirely of ASCII letters (both upper and
+lower case allowed), digits, underscores, and hyphens.  All other files,
+including this text file, are ignored.
+
+Scripts in this directory will be run separately for each distro archive,
+possibly twice because publication happens in two passes: an expedited one for
+just the security uploads and then a second, slower pass for all packages.
+
+Some variables will be set before each script is run:
+
+ARCHIVEROOT - the archive's root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/ )
+
+DISTSROOT - the archive's dists root directory
+(e.g. /srv/launchpad.net/ubuntu-archive/ubuntu/dists )
+
+The script's PATH will be extended with the Launchpad source tree's
+cronscripts/publishing directory.

=== modified file 'database/schema/comments.sql'
--- database/schema/comments.sql	2011-03-29 10:17:46 +0000
+++ database/schema/comments.sql	2011-04-07 15:19:36 +0000
@@ -231,6 +231,12 @@
 COMMENT ON COLUMN BugSubscriptionFilterTag.tag IS 'A bug tag.';
 COMMENT ON COLUMN BugSubscriptionFilterTag.include IS 'If True, send only messages for bugs having this tag, else send only messages for bugs which do not have this tag.';
 
+-- BugSubscriptionFilterMute
+COMMENT ON TABLE BugSubscriptionFilterMute IS 'Mutes for subscription filters.';
+COMMENT ON COLUMN BugSubscriptionFilterMute.person IS 'The person that muted their subscription to this filter.';
+COMMENT ON COLUMN BugSubscriptionFilterMute.filter IS 'The subscription filter of this record';
+COMMENT ON COLUMN BugSubscriptionFilterMute.date_created IS 'The date at which this mute was created.';
+
 -- BugTag
 COMMENT ON TABLE BugTag IS 'Attaches simple text tags to a bug.';
 COMMENT ON COLUMN BugTag.bug IS 'The bug the tags is attached to.';

=== added file 'database/schema/patch-2208-58-0.sql'
--- database/schema/patch-2208-58-0.sql	1970-01-01 00:00:00 +0000
+++ database/schema/patch-2208-58-0.sql	2011-04-07 15:19:36 +0000
@@ -0,0 +1,25 @@
+-- Copyright 2011 Canonical Ltd. This software is licensed under the
+-- GNU Affero General Public License version 3 (see the file LICENSE).
+
+SET client_min_messages=ERROR;
+
+-- A table to store subscription mutes in.
+
+CREATE TABLE BugSubscriptionFilterMute (
+    person integer REFERENCES Person(id)
+        ON DELETE CASCADE NOT NULL,
+    filter integer REFERENCES BugSubscriptionFilter(id)
+        ON DELETE CASCADE NOT NULL,
+    date_created timestamp without time zone
+        DEFAULT timezone('UTC'::text, now()) NOT NULL,
+    CONSTRAINT bugsubscriptionfiltermute_pkey PRIMARY KEY (person, filter)
+);
+
+-- We don't need an index on person, as the primary key index can be used
+-- for those lookups. We have an index on just filter, as the bulk of our
+-- lookups will be on filter.
+CREATE INDEX bugsubscriptionfiltermute__filter__idx
+    ON BugSubscriptionFilterMute(filter);
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 58, 0);
+

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg	2011-04-06 15:35:05 +0000
+++ database/schema/security.cfg	2011-04-07 15:19:36 +0000
@@ -1108,6 +1108,7 @@
 public.bugproductinfestation            = SELECT, INSERT, UPDATE
 public.bugsubscription                  = SELECT, INSERT, UPDATE, DELETE
 public.bugsubscriptionfilter            = SELECT, INSERT, UPDATE, DELETE
+public.bugsubscriptionfiltermute        = SELECT, INSERT, UPDATE, DELETE
 public.bugsubscriptionfilterstatus      = SELECT, INSERT, UPDATE, DELETE
 public.bugsubscriptionfilterimportance  = SELECT, INSERT, UPDATE, DELETE
 public.bugsubscriptionfiltertag         = SELECT, INSERT, UPDATE, DELETE
@@ -1531,6 +1532,7 @@
 public.bugnotificationrecipient         = SELECT, INSERT, UPDATE
 public.bugsubscription                  = SELECT, INSERT
 public.bugsubscriptionfilter            = SELECT, INSERT
+public.bugsubscriptionfiltermute        = SELECT, INSERT
 public.bugsubscriptionfilterstatus      = SELECT, INSERT
 public.bugsubscriptionfilterimportance  = SELECT, INSERT
 public.bugsubscriptionfiltertag         = SELECT, INSERT

=== modified file 'lib/canonical/config/schema-lazr.conf'
--- lib/canonical/config/schema-lazr.conf	2011-03-23 18:29:09 +0000
+++ lib/canonical/config/schema-lazr.conf	2011-04-07 15:19:36 +0000
@@ -24,6 +24,21 @@
 # datatype: string
 dbuser: archivepublisher
 
+# Location where the run-parts directories for publish-ftpmaster
+# customization are to be found.  Absolute path, or path relative to the
+# Launchpad source tree, or "none" to skip execution of run-parts.
+#
+# Under this directory, publish-ftpmaster will look for directories
+# <distro>/publish-distro.d and <distro>/finalize.d.
+#
+# datatype: string
+run_parts_location: none
+
+# XXX JeroenVermeulen 2011-03-29 bug=741683: Retire this item when
+# Dapper support ends.
+# datatype: boolean
+run_commercial_compat: false
+
 # XXX: wgrant 2011-03-22 bug=739992: These three keys are obsolete and
 # should be removed after 11.04 is released.
 root: none

=== modified file 'lib/lp/app/javascript/client.js'
--- lib/lp/app/javascript/client.js	2011-04-06 16:59:07 +0000
+++ lib/lp/app/javascript/client.js	2011-04-07 15:19:36 +0000
@@ -260,6 +260,7 @@
     }
 };
 
+<<<<<<< TREE
 /**
  * Display a list of notifications - error, warning, informational or debug.
  * @param notifications An json encoded array of (level, message) tuples.
@@ -326,6 +327,69 @@
         });
     });
 }
+=======
+/**
+ * Display a list of notifications - error, warning, informational or debug.
+ * @param notifications An json encoded array of (level, message) tuples.
+ */
+function display_notifications(notifications) {
+    if (notifications === undefined)
+        return;
+
+    var notification_info = {
+        'level10': {
+            'notifications': new Array(),
+            'selector': '.debug.message',
+            'css_class': 'debug message'
+        },
+        'level20': {
+            'notifications': new Array(),
+            'selector': '.informational.message',
+            'css_class': 'informational message'
+        },
+        'level30': {
+            'notifications': new Array(),
+            'selector': '.warning.message',
+            'css_class': 'warning message'
+        },
+        'level40': {
+            'notifications': new Array(),
+            'selector': '.error.message',
+            'css_class': 'error message'
+        }
+    };
+
+    // First remove any existing notifications.
+    Y.each(notification_info, function (info) {
+        var nodes = Y.all('div'+info.selector);
+        nodes.each(function(node) {
+            var parent = node.get('parentNode');
+            parent.removeChild(node);
+        });
+    });
+
+    // Now display the new ones.
+    notifications = Y.JSON.parse(notifications);
+    Y.each(notifications, function(notification, key) {
+        var level = notification[0];
+        var message = notification[1];
+        var info = notification_info['level'+level];
+        info.notifications.push(message);
+    });
+
+    // The place where we want to insert the notification divs.
+    var last_message = Y.one('div.context-publication');
+    // A mapping from the div class to notification messages.
+    Y.each(notification_info, function(info) {
+        Y.each(info.notifications, function(notification) {
+            var node = Y.Node.create("<div class='"+info.css_class+"'/>");
+            node.set('innerHTML', notification);
+            last_message.insert(node, 'after');
+            last_message = node;
+        });
+    });
+}
+>>>>>>> MERGE-SOURCE
 
 // The resources that come together to make Launchpad.
 

=== modified file 'lib/lp/app/javascript/tests/test_lp_client.html'
--- lib/lp/app/javascript/tests/test_lp_client.html	2011-04-05 11:47:15 +0000
+++ lib/lp/app/javascript/tests/test_lp_client.html	2011-04-07 15:19:36 +0000
@@ -18,11 +18,17 @@
   <script type="text/javascript" src="test_lp_client.js"></script>
 </head>
 <body class="yui3-skin-sam">
+<<<<<<< TREE
   <div class="context-publication">
     <div id="request-notifications">
     </div>
     <div id="container-of-stuff">
     </div>
+=======
+  <div class="context-publication">
+    <div id="container-of-stuff">
+    </div>
+>>>>>>> MERGE-SOURCE
   </div>
 </body>
 </html>

=== modified file 'lib/lp/app/javascript/tests/test_lp_client.js'
--- lib/lp/app/javascript/tests/test_lp_client.js	2011-04-05 12:10:01 +0000
+++ lib/lp/app/javascript/tests/test_lp_client.js	2011-04-07 15:19:36 +0000
@@ -187,6 +187,7 @@
       }
 }));
 
+<<<<<<< TREE
 function MockHttpResponse () {
     this.responseText = '[]';
     this.responseHeaders = {};
@@ -247,6 +248,55 @@
 
 }));
 
+=======
+function MockHttpResponse () {
+    this.responseText = '[]';
+    this.responseHeaders = {};
+}
+
+MockHttpResponse.prototype = {
+    setResponseHeader: function (header, value) {
+        this.responseHeaders[header] = value;
+    },
+
+    getResponseHeader: function(header) {
+        return this.responseHeaders[header];
+    }
+};
+
+suite.add(new Y.Test.Case({
+    name: "lp.client.notifications",
+
+    setUp: function() {
+        this.client = new Y.lp.client.Launchpad();
+        this.args=[this.client, null, this._on_success, false];
+        this.response = new MockHttpResponse();
+        this.response.setResponseHeader('Content-Type', 'application/json');
+    },
+
+    _on_success: function(entry) {
+    },
+
+    _checkNotificationNode: function(node_class, node_text) {
+        var node = Y.one('div'+node_class);
+        Assert.areEqual(node_text, node.get("innerHTML"));
+    },
+
+    test_display_notifications: function() {
+        var notifications = '[ [10, "A debug"], [20, "An info"], ' +
+            '[30, "A warning"], [40, "An error"] ]';
+        this.response.setResponseHeader(
+                'X-Lazr-Notifications', notifications);
+        Y.lp.client.wrap_resource_on_success(null, this.response, this.args);
+        this._checkNotificationNode('.debug.message', 'A debug');
+        this._checkNotificationNode('.informational.message', 'An info');
+        this._checkNotificationNode('.warning.message', 'A warning');
+        this._checkNotificationNode('.error.message', 'An error');
+    }
+
+}));
+
+>>>>>>> MERGE-SOURCE
 
 // Lock, stock, and two smoking barrels.
 var handle_complete = function(data) {

=== modified file 'lib/lp/archivepublisher/config.py'
--- lib/lp/archivepublisher/config.py	2011-03-14 16:25:18 +0000
+++ lib/lp/archivepublisher/config.py	2011-04-07 15:19:36 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 #
 # This is the python package that defines the
@@ -12,7 +12,10 @@
 
 from canonical.config import config
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
-from lp.soyuz.enums import ArchivePurpose
+from lp.soyuz.enums import (
+    ArchivePurpose,
+    archive_suffixes,
+    )
 
 
 APT_FTPARCHIVE_PURPOSES = (ArchivePurpose.PRIMARY, ArchivePurpose.COPY)
@@ -48,10 +51,7 @@
         pubconf.distroroot = db_pubconf.root_dir
         pubconf.archiveroot = os.path.join(
             pubconf.distroroot, archive.distribution.name)
-        if archive.purpose == ArchivePurpose.PARTNER:
-            pubconf.archiveroot += '-partner'
-        elif archive.purpose == ArchivePurpose.DEBUG:
-            pubconf.archiveroot += '-debug'
+        pubconf.archiveroot += archive_suffixes[archive.purpose]
     elif archive.is_copy:
         pubconf.distroroot = db_pubconf.root_dir
         pubconf.archiveroot = os.path.join(

=== added file 'lib/lp/archivepublisher/scripts/publish_ftpmaster.py'
--- lib/lp/archivepublisher/scripts/publish_ftpmaster.py	1970-01-01 00:00:00 +0000
+++ lib/lp/archivepublisher/scripts/publish_ftpmaster.py	2011-04-07 15:19:36 +0000
@@ -0,0 +1,429 @@
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Master distro publishing script."""
+
+__metaclass__ = type
+__all__ = [
+    'PublishFTPMaster',
+    ]
+
+from optparse import OptionParser
+import os
+from zope.component import getUtility
+
+from canonical.config import config
+from lp.archivepublisher.config import getPubConfig
+from lp.registry.interfaces.distribution import IDistributionSet
+from lp.services.scripts.base import (
+    LaunchpadCronScript,
+    LaunchpadScriptFailure,
+    )
+from lp.services.utils import file_exists
+from lp.soyuz.enums import ArchivePurpose
+from lp.soyuz.scripts import publishdistro
+from lp.soyuz.scripts.ftpmaster import LpQueryDistro
+from lp.soyuz.scripts.processaccepted import ProcessAccepted
+
+
+# XXX JeroenVermeulen 2011-03-31 bug=746229: to start publishing debug
+# archives, get rid of this list.
+ARCHIVES_TO_PUBLISH = [
+    ArchivePurpose.PRIMARY,
+    ArchivePurpose.PARTNER,
+    ]
+
+
+def compose_shell_boolean(boolean_value):
+    """Represent a boolean value as "yes" or "no"."""
+    boolean_text = {
+        True: "yes",
+        False: "no",
+    }
+    return boolean_text[boolean_value]
+
+
+def compose_env_string(env):
+    """Turn a dict into a series of shell parameter assignments."""
+    return ' '.join(['='.join(pair) for pair in env.iteritems()])
+
+
+def get_distscopyroot(archive_config):
+    """Return the distscopy root directory for `archive_config`."""
+    return archive_config.archiveroot + "-distscopy"
+
+
+class StoreArgument:
+    """Helper class: receive argument and store it."""
+
+    def __call__(self, argument):
+        self.argument = argument
+
+
+def find_run_parts_dir(distro, parts):
+    """Find the requested run-parts directory, if it exists."""
+    run_parts_location = config.archivepublisher.run_parts_location
+    if not run_parts_location:
+        return
+
+    if run_parts_location.startswith("/"):
+        # Absolute path.
+        base_dir = run_parts_location
+    else:
+        # Relative path.
+        base_dir = os.path.join(config.root, run_parts_location)
+
+    parts_dir = os.path.join(base_dir, distro.name, parts)
+    if file_exists(parts_dir):
+        return parts_dir
+    else:
+        return None
+
+
+class PublishFTPMaster(LaunchpadCronScript):
+    """Publish a distro (update)."""
+
+    # Has the publication been done?  This indicates that the distsroots
+    # have been replaced with newly generated ones.  It has implications
+    # for cleanup.
+    done_pub = False
+
+    def add_my_options(self):
+        self.parser.add_option(
+            '-d', '--distribution', dest='distribution', default=None,
+            help="Distribution to publish.")
+        self.parser.add_option(
+            '-s', '--security-only', dest='security_only',
+            action='store_true', default=False, help="Security upload only.")
+
+    def executeShell(self, command_line, failure=None):
+        """Run `command_line` through a shell.
+
+        This won't just load an external program and run it; the command
+        line goes through the full shell treatment including variable
+        substitutions, output redirections, and so on.
+
+        :param command_line: Shell command.
+        :param failure: Raise `failure` as an exception if the shell
+            command returns a nonzero value.  If omitted, nonzero return
+            values are ignored.
+        """
+        self.logger.debug("Executing: %s" % command_line)
+        retval = os.system(command_line)
+        if retval != 0:
+            self.logger.debug("Command returned %d.", retval)
+            if failure is not None:
+                self.logger.debug("Command failed: %s", failure)
+                raise failure
+
+    def getArchives(self):
+        """Find archives for `self.distribution` that should be published."""
+        # XXX JeroenVermeulen 2011-03-31 bug=746229: to start publishing
+        # debug archives, change this to return
+        # list(self.distribution.all_distro_archives).
+        return [
+            archive
+            for archive in self.distribution.all_distro_archives
+                if archive.purpose in ARCHIVES_TO_PUBLISH]
+
+    def makeConfigs(self):
+        """Set up configuration objects for archives to be published.
+
+        The configs dict maps the archive purposes that are relevant for
+        publishing to the respective archives' configurations.
+        """
+        return dict(
+            (archive.purpose, getPubConfig(archive))
+            for archive in self.archives)
+
+    def cleanUp(self):
+        """Post-publishing cleanup."""
+        self.logger.debug("Cleaning up.")
+        for purpose, archive_config in self.configs.iteritems():
+            self.logger.debug(
+                "Moving %s dists backup to safe keeping for next time.",
+                purpose.title)
+            distscopyroot = get_distscopyroot(archive_config)
+            dists = os.path.join(distscopyroot, "dists")
+            if self.done_pub:
+                replacement_dists = archive_config.distsroot + ".old"
+            else:
+                replacement_dists = archive_config.distsroot + ".new"
+            if file_exists(replacement_dists):
+                self.logger.debug(
+                    "Renaming %s to %s.", replacement_dists, dists)
+                os.rename(replacement_dists, dists)
+
+    def processAccepted(self):
+        """Run the process-accepted script."""
+        self.logger.debug(
+            "Processing the accepted queue into the publishing records...")
+        script = ProcessAccepted(test_args=[self.distribution.name])
+        script.txn = self.txn
+        script.logger = self.logger
+        script.main()
+
+    def getDirtySuites(self):
+        """Return list of suites that have packages pending publication."""
+        self.logger.debug("Querying which suites are pending publication...")
+        query_distro = LpQueryDistro(
+            test_args=['-d', self.distribution.name, "pending_suites"])
+        receiver = StoreArgument()
+        query_distro.runAction(presenter=receiver)
+        return receiver.argument.split()
+
+    def getDirtySecuritySuites(self):
+        """List security suites with pending publications."""
+        suites = self.getDirtySuites()
+        return [suite for suite in suites if suite.endswith('-security')]
+
+    def rsyncNewDists(self, archive_purpose):
+        """Populate dists.new with a copy of distsroot.
+
+        Uses "rsync -aH --delete" so that any obsolete files that may
+        still be in dists.new are cleaned up (bug 58835).
+
+        :param archive_purpose: The (purpose of the) archive to copy.
+        """
+        archive_config = self.configs[archive_purpose]
+        self.executeShell(
+            "rsync -aH --delete '%s/' '%s/dists.new'"
+            % (archive_config.distsroot, archive_config.archiveroot),
+            failure=LaunchpadScriptFailure(
+                "Failed to rsync dists.new for %s." % archive_purpose.title))
+
+    def setUpDirs(self):
+        """Copy the dists tree ready for publishing into.
+
+        We do this so that we don't get an inconsistent dists tree at
+        any point during the publishing cycle (which would cause buildds
+        to explode).
+
+        This is now done through maintaining a persistent backup copy of
+        the dists directory, which we move into place and bring up to
+        date with rsync.  Should achieve the same effect as copying, but
+        faster.
+        """
+        for archive_config in self.configs.itervalues():
+            archiveroot = archive_config.archiveroot
+            if not file_exists(archiveroot):
+                self.logger.debug("Creating archive root %s.", archiveroot)
+                os.makedirs(archiveroot)
+            distsroot = archive_config.distsroot
+            if not file_exists(distsroot):
+                self.logger.debug("Creating dists root %s.", distsroot)
+                os.makedirs(distsroot)
+
+        for purpose, archive_config in self.configs.iteritems():
+            dists = os.path.join(get_distscopyroot(archive_config), "dists")
+            dists_new = os.path.join(archive_config.archiveroot, "dists.new")
+            if not file_exists(dists):
+                os.makedirs(dists)
+            os.rename(dists, dists_new)
+            self.rsyncNewDists(purpose)
+
+    def publishDistroArchive(self, archive, security_suites=None):
+        """Publish the results for an archive.
+
+        :param archive: Archive to publish.
+        :param security_suites: An optional list of suites to restrict
+            the publishing to.
+        """
+        purpose = archive.purpose
+        self.logger.debug(
+            "Publishing the %s %s...", self.distribution.name, purpose.title)
+        archive_config = self.configs[purpose]
+        arguments = [
+            '-v', '-v',
+            '-d', self.distribution.name,
+            '-R', archive_config.distsroot + '.new',
+            ]
+
+        if archive.purpose == ArchivePurpose.PARTNER:
+            arguments.append('--partner')
+
+        if security_suites is not None:
+            arguments += sum([['-s', suite] for suite in security_suites], [])
+
+        parser = OptionParser()
+        publishdistro.add_options(parser)
+        options, args = parser.parse_args(arguments)
+        publishdistro.run_publisher(options, txn=self.txn, log=self.logger)
+
+        self.runPublishDistroParts(archive)
+
+    def runPublishDistroParts(self, archive):
+        """Execute the publish-distro hooks."""
+        archive_config = self.configs[archive.purpose]
+        env = {
+            'DISTSROOT': archive_config.distsroot,
+            'ARCHIVEROOT': archive_config.archiveroot,
+            }
+        self.runParts('publish-distro.d', env)
+
+    def installDists(self):
+        """Put the new dists into place, as near-atomically as possible."""
+        # Before we start moving directories around, make as nearly
+        # sure as possible that we can do either none or all of them.
+        self.logger.debug("Looking for impediments to publication.")
+        for purpose, archive_config in self.configs.iteritems():
+            old_distsroot = archive_config.distsroot + '.old'
+            if file_exists(old_distsroot):
+                raise LaunchpadScriptFailure(
+                    "Old %s distsroot %s is in the way."
+                    % (purpose.title, old_distsroot))
+
+        # Move the existing distsroots out of the way, and move the new
+        # ones in their place.
+        self.logger.debug("Placing the new dists into place...")
+        for archive_config in self.configs.itervalues():
+            distsroot = archive_config.distsroot
+            os.rename(distsroot, distsroot + ".old")
+            os.rename(distsroot + ".new", distsroot)
+
+        # Yay, we did it!  Mark the fact because it makes a difference
+        # to the cleanup procedure.
+        self.done_pub = True
+
+        for purpose, archive_config in self.configs.iteritems():
+            dists = os.path.join(get_distscopyroot(archive_config), "dists")
+            self.logger.debug(
+                "Renaming old %s distsroot to %s." % (purpose.title, dists))
+            os.rename(archive_config.distsroot + ".old", dists)
+
+    def runCommercialCompat(self):
+        """Generate the -commercial pocket.
+
+        This is done for backwards compatibility with dapper, edgy, and
+        feisty releases.  Failure here is not fatal.
+        """
+        # XXX JeroenVermeulen 2011-03-24 bug=741683: Retire
+        # commercial-compat.sh (and this method) as soon as Dapper
+        # support ends.
+        if self.distribution.name != 'ubuntu':
+            return
+        if not config.archivepublisher.run_commercial_compat:
+            return
+
+        self.executeShell("""
+            env PATH="$PATH:%s/cronscripts/publishing" \
+                LPCONFIG="%s" \
+                commercial-compat.sh
+            """ % (config.root, config.instance_name))
+
    def generateListings(self):
        """Create ls-lR.gz listings."""
        self.logger.debug("Creating ls-lR.gz...")
        lslr = "ls-lR.gz"
        lslr_new = "." + lslr + ".new"
        for purpose, archive_config in self.configs.iteritems():
            lslr_file = os.path.join(archive_config.archiveroot, lslr)
            new_lslr_file = os.path.join(archive_config.archiveroot, lslr_new)
            # Build the listing under a hidden ".new" name, then rename
            # it into place, so readers never see a half-written file.
            # TZ=UTC keeps the timestamps in the listing stable across
            # server timezones; gzip -n omits the timestamp from the
            # gzip header.
            if file_exists(new_lslr_file):
                os.remove(new_lslr_file)
            self.executeShell(
                "cd -- '%s' ; TZ=UTC ls -lR | gzip -9n >'%s'"
                % (archive_config.archiveroot, lslr_new),
                failure=LaunchpadScriptFailure(
                    "Failed to create %s for %s." % (lslr, purpose.title)))
            os.rename(new_lslr_file, lslr_file)
+
+    def clearEmptyDirs(self):
+        """Clear out any redundant empty directories."""
+        for archive_config in self.configs.itervalues():
+            self.executeShell(
+                "find '%s' -type d -empty | xargs -r rmdir"
+                % archive_config.archiveroot)
+
+    def processOptions(self):
+        """Handle command-line options.
+
+        Sets `self.distribution` to the `Distribution` to publish.
+        """
+        if self.options.distribution is None:
+            raise LaunchpadScriptFailure("Specify a distribution.")
+
+        self.distribution = getUtility(IDistributionSet).getByName(
+            self.options.distribution)
+        if self.distribution is None:
+            raise LaunchpadScriptFailure(
+                "Distribution %s not found." % self.options.distribution)
+
+    def runParts(self, parts, env):
+        """Execute run-parts.
+
+        :param parts: The run-parts directory to execute:
+            "publish-distro.d" or "finalize.d".
+        :param env: A dict of environment variables to pass to the
+            scripts in the run-parts directory.
+        """
+        parts_dir = find_run_parts_dir(self.distribution, parts)
+        if parts_dir is None:
+            self.logger.debug("Skipping run-parts %s: not configured.", parts)
+            return
+        total_env_string = ' '.join([
+            "PATH=\"$PATH:%s/cronscripts/publishing\"" % config.root,
+            compose_env_string(env),
+            ])
+        self.executeShell(
+            "env %s run-parts -- '%s'" % (total_env_string, parts_dir),
+            failure=LaunchpadScriptFailure(
+                "Failure while executing run-parts %s." % parts_dir))
+
+    def runFinalizeParts(self, security_only=False):
+        """Run the finalize.d parts to finalize publication."""
+        env = {
+            'SECURITY_UPLOAD_ONLY': compose_shell_boolean(security_only),
+            'ARCHIVEROOTS': ' '.join([
+                archive_config.archiveroot
+                for archive_config in self.configs.itervalues()]),
+        }
+        self.runParts('finalize.d', env)
+
+    def publishSecurityUploads(self):
+        """Quickly process just the pending security uploads."""
+        self.logger.debug("Expediting security uploads.")
+        security_suites = self.getDirtySecuritySuites()
+        if len(security_suites) == 0:
+            self.logger.debug("Nothing to do for security publisher.")
+            return
+        partner_archive = self.distribution.getArchive("partner")
+        if partner_archive is not None:
+            self.publishDistroArchive(partner_archive)
+        self.publishDistroArchive(
+            self.distribution.main_archive, security_suites=security_suites)
+        self.installDists()
+        self.runCommercialCompat()
+        self.runFinalizeParts(security_only=True)
+
+    def publishAllUploads(self):
+        """Publish the distro's complete uploads."""
+        self.logger.debug("Full publication.  This may take some time.")
+        for archive in self.archives:
+            # This, for the main archive, is where the script spends
+            # most of its time.
+            self.publishDistroArchive(archive)
+
+        self.installDists()
+        self.runCommercialCompat()
+        self.generateListings()
+        self.clearEmptyDirs()
+        self.runFinalizeParts()
+
    def setUp(self):
        """Process options, and set up internal state."""
        # Order matters: processOptions() resolves self.distribution,
        # which getArchives() and makeConfigs() presumably read --
        # TODO confirm (both are defined above this chunk).
        self.processOptions()
        self.archives = self.getArchives()
        self.configs = self.makeConfigs()
+
    def main(self):
        """See `LaunchpadScript`."""
        self.setUp()
        try:
            self.processAccepted()
            self.setUpDirs()
            # Security uploads go out first so they are not delayed by
            # the (slow) full publication below.
            self.publishSecurityUploads()
            if not self.options.security_only:
                self.publishAllUploads()
        finally:
            # Runs even on failure; cleanUp()'s behavior differs
            # depending on self.done_pub (set in installDists).
            self.cleanUp()

=== added file 'lib/lp/archivepublisher/tests/test_publish_ftpmaster.py'
--- lib/lp/archivepublisher/tests/test_publish_ftpmaster.py	1970-01-01 00:00:00 +0000
+++ lib/lp/archivepublisher/tests/test_publish_ftpmaster.py	2011-04-07 15:19:36 +0000
@@ -0,0 +1,699 @@
+# Copyright 2011 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test publish-ftpmaster cron script."""
+
+__metaclass__ = type
+
+from apt_pkg import TagFile
+import os
+from textwrap import dedent
+from zope.component import getUtility
+
+from canonical.config import config
+from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
+from canonical.testing.layers import (
+    LaunchpadZopelessLayer,
+    ZopelessDatabaseLayer,
+    )
+from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
+from lp.registry.interfaces.pocket import (
+    PackagePublishingPocket,
+    pocketsuffix,
+    )
+from lp.services.log.logger import DevNullLogger
+from lp.services.scripts.base import LaunchpadScriptFailure
+from lp.services.utils import file_exists
+from lp.soyuz.enums import (
+    ArchivePurpose,
+    PackagePublishingStatus,
+    )
+from lp.archivepublisher.scripts.publish_ftpmaster import (
+    compose_env_string,
+    compose_shell_boolean,
+    find_run_parts_dir,
+    PublishFTPMaster,
+    )
+from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
+from lp.testing import (
+    run_script,
+    TestCase,
+    TestCaseWithFactory,
+    )
+from lp.testing.fakemethod import FakeMethod
+
+
def path_exists(*path_components):
    """Does the given file or directory exist?"""
    full_path = os.path.join(*path_components)
    return file_exists(full_path)
+
+
def name_spph_suite(spph):
    """Return name of `spph`'s suite."""
    # A suite name is the series name plus the pocket's suffix.
    suffix = pocketsuffix[spph.pocket]
    return spph.distroseries.name + suffix
+
+
def get_pub_config(distro):
    """Find the publishing config for `distro`."""
    config_set = getUtility(IPublisherConfigSet)
    return config_set.getByDistribution(distro)
+
+
def get_archive_root(pub_config):
    """Return the archive root for the given publishing config."""
    distro_name = pub_config.distribution.name
    return os.path.join(pub_config.root_dir, distro_name)


def get_dists_root(pub_config):
    """Return the dists root directory for the given publishing config."""
    archive_root = get_archive_root(pub_config)
    return os.path.join(archive_root, "dists")


def get_distscopy_root(pub_config):
    """Return the "distscopy" root for the given publishing config."""
    return "%s-distscopy" % get_archive_root(pub_config)


def get_run_parts_path():
    """Get relative path to the run-parts location in the Launchpad source."""
    return os.path.join("cronscripts", "publishing", "distro-parts")
+
+
class HelpersMixin:
    """Helpers for the PublishFTPMaster tests."""

    def enableRunParts(self, parts_directory=None):
        """Set up for run-parts execution.

        :param parts_directory: Base location for the run-parts
            directories.  If omitted, the run-parts directory from the
            Launchpad source tree will be used.
        """
        if parts_directory is None:
            parts_directory = get_run_parts_path()

        # Push a config overlay; the matching addCleanup below pops it
        # again so the override cannot leak into other tests.
        config.push("run-parts", dedent("""\
            [archivepublisher]
            run_parts_location: %s
            """ % parts_directory))

        self.addCleanup(config.pop, "run-parts")
+
+
class TestPublishFTPMasterHelpers(TestCase):

    def test_compose_env_string_iterates_env(self):
        env = {
            "A": "1",
            "B": "2",
        }
        env_string = compose_env_string(env)
        # Dict iteration order is unspecified, so accept either order.
        self.assertIn(env_string, ["A=1 B=2", "B=2 A=1"])

    def test_compose_shell_boolean_shows_True_as_yes(self):
        self.assertEqual("yes", compose_shell_boolean(True))

    def test_compose_shell_boolean_shows_False_as_no(self):
        self.assertEqual("no", compose_shell_boolean(False))
+
+
class TestFindRunPartsDir(TestCaseWithFactory, HelpersMixin):
    """Tests for `find_run_parts_dir`."""

    layer = ZopelessDatabaseLayer

    def test_find_run_parts_dir_finds_relative_runparts_directory(self):
        self.enableRunParts()
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        self.assertEqual(
            os.path.join(
                config.root, get_run_parts_path(), "ubuntu", "finalize.d"),
            find_run_parts_dir(ubuntu, "finalize.d"))

    def test_find_run_parts_dir_finds_absolute_runparts_directory(self):
        self.enableRunParts(os.path.join(config.root, get_run_parts_path()))
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        # Fixed continuation indentation: the find_run_parts_dir call is
        # the second argument to assertEqual, not part of os.path.join.
        self.assertEqual(
            os.path.join(
                config.root, get_run_parts_path(), "ubuntu", "finalize.d"),
            find_run_parts_dir(ubuntu, "finalize.d"))

    def test_find_run_parts_dir_ignores_blank_config(self):
        # An empty run_parts_location disables run-parts execution.
        self.enableRunParts("")
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        self.assertIs(None, find_run_parts_dir(ubuntu, "finalize.d"))

    def test_find_run_parts_dir_ignores_none_config(self):
        # The special value "none" also disables run-parts execution.
        self.enableRunParts("none")
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        self.assertIs(None, find_run_parts_dir(ubuntu, "finalize.d"))

    def test_find_run_parts_dir_ignores_nonexistent_directory(self):
        # A distribution with no run-parts directory of its own gets None.
        self.enableRunParts()
        distro = self.factory.makeDistribution()
        self.assertIs(None, find_run_parts_dir(distro, "finalize.d"))
+
+
class TestPublishFTPMasterScript(TestCaseWithFactory, HelpersMixin):
    layer = LaunchpadZopelessLayer

    # Location of the script under test, relative to the tree root.
    SCRIPT_PATH = "cronscripts/publish-ftpmaster.py"
+
    def setUpForScriptRun(self, distro):
        """Mock up config to run the script on `distro`."""
        # Point the distro's publishing root at a fresh temporary
        # directory, so the script writes nowhere permanent.
        pub_config = getUtility(IPublisherConfigSet).getByDistribution(distro)
        pub_config.root_dir = unicode(
            self.makeTemporaryDirectory())

    def makeDistro(self):
        """Create a `Distribution` for testing.

        The distribution will have a publishing directory set up, which
        will be cleaned up after the test.
        """
        return self.factory.makeDistribution(
            publish_root_dir=unicode(self.makeTemporaryDirectory()))

    def prepareUbuntu(self):
        """Obtain a reference to Ubuntu, set up for testing.

        A temporary publishing directory will be set up, and it will be
        cleaned up after the test.
        """
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        self.setUpForScriptRun(ubuntu)
        return ubuntu

    def makeScript(self, distro=None):
        """Produce instance of the `PublishFTPMaster` script."""
        if distro is None:
            distro = self.makeDistro()
        script = PublishFTPMaster(test_args=["-d", distro.name])
        # Reuse the layer's transaction and silence logging.
        script.txn = self.layer.txn
        script.logger = DevNullLogger()
        return script
+
    def readReleaseFile(self, filename):
        """Read a Release file, return as a keyword/value dict."""
        sections = list(TagFile(file(filename)))
        # A Release file must consist of exactly one apt tag section.
        self.assertEqual(1, len(sections))
        return dict(sections[0])

    def writeMarkerFile(self, path, contents):
        """Write a marker file for checking directory movements.

        :param path: A list of path components.
        :param contents: Text to write into the file.
        """
        marker = file(os.path.join(*path), "w")
        marker.write(contents)
        marker.flush()
        marker.close()

    def readMarkerFile(self, path):
        """Read the contents of a marker file.

        :param path: A list of path components.
        :return: Contents of the marker file.
        """
        return file(os.path.join(*path)).read()

    def enableCommercialCompat(self):
        """Enable commercial-compat.sh runs for the duration of the test."""
        # Config overlay is popped again by addCleanup, so the setting
        # cannot leak into other tests.
        config.push("commercial-compat", dedent("""\
            [archivepublisher]
            run_commercial_compat: true
            """))
        self.addCleanup(config.pop, "commercial-compat")
+
+    def test_script_runs_successfully(self):
+        ubuntu = self.prepareUbuntu()
+        self.layer.txn.commit()
+        stdout, stderr, retval = run_script(
+            self.SCRIPT_PATH + " -d ubuntu")
+        self.assertEqual(0, retval, "Script failure:\n" + stderr)
+
+    def test_script_is_happy_with_no_publications(self):
+        distro = self.makeDistro()
+        self.makeScript(distro).main()
+
+    def test_produces_listings(self):
+        distro = self.makeDistro()
+        self.makeScript(distro).main()
+        self.assertTrue(
+            path_exists(get_archive_root(get_pub_config(distro)), 'ls-lR.gz'))
+
+    def test_can_run_twice(self):
+        test_publisher = SoyuzTestPublisher()
+        distroseries = test_publisher.setUpDefaultDistroSeries()
+        distro = distroseries.distribution
+        pub_config = get_pub_config(distro)
+        self.factory.makeComponentSelection(
+            distroseries=distroseries, component="main")
+        self.factory.makeArchive(
+            distribution=distro, purpose=ArchivePurpose.PARTNER)
+        test_publisher.getPubSource()
+
+        self.setUpForScriptRun(distro)
+        self.makeScript(distro).main()
+        self.makeScript(distro).main()
+
    def test_publishes_package(self):
        # End-to-end check: publish one source package, then inspect
        # the resulting archive tree and Release files on disk.
        test_publisher = SoyuzTestPublisher()
        distroseries = test_publisher.setUpDefaultDistroSeries()
        distro = distroseries.distribution
        pub_config = get_pub_config(distro)
        self.factory.makeComponentSelection(
            distroseries=distroseries, component="main")
        self.factory.makeArchive(
            distribution=distro, purpose=ArchivePurpose.PARTNER)
        test_publisher.getPubSource()

        self.setUpForScriptRun(distro)
        self.makeScript(distro).main()

        archive_root = get_archive_root(pub_config)
        dists_root = get_dists_root(pub_config)

        # The .dsc lands in the pool, and the overrides file points at it.
        dsc = os.path.join(
            archive_root, 'pool', 'main', 'f', 'foo', 'foo_666.dsc')
        self.assertEqual("I do not care about sources.", file(dsc).read())
        overrides = os.path.join(
            archive_root + '-overrides', distroseries.name + '_main_source')
        self.assertEqual(dsc, file(overrides).read().rstrip())
        self.assertTrue(path_exists(
            dists_root, distroseries.name, 'main', 'source', 'Sources.gz'))
        self.assertTrue(path_exists(
            dists_root, distroseries.name, 'main', 'source', 'Sources.bz2'))

        # The suite-level Release file describes the distroseries.
        distcopyseries = os.path.join(dists_root, distroseries.name)
        release = self.readReleaseFile(
            os.path.join(distcopyseries, "Release"))
        self.assertEqual(distro.displayname, release['Origin'])
        self.assertEqual(distro.displayname, release['Label'])
        self.assertEqual(distroseries.name, release['Suite'])
        self.assertEqual(distroseries.name, release['Codename'])
        self.assertEqual("main", release['Components'])
        self.assertEqual("", release["Architectures"])
        self.assertIn("Date", release)
        self.assertIn("Description", release)
        self.assertNotEqual("", release["MD5Sum"])
        self.assertNotEqual("", release["SHA1"])
        self.assertNotEqual("", release["SHA256"])

        # The per-component Release file describes main/source.
        main_release = self.readReleaseFile(
            os.path.join(distcopyseries, 'main', 'source', "Release"))
        self.assertEqual(distroseries.name, main_release["Archive"])
        self.assertEqual("main", main_release["Component"])
        self.assertEqual(distro.displayname, main_release["Origin"])
        self.assertEqual(distro.displayname, main_release["Label"])
        self.assertEqual("source", main_release["Architecture"])
+
    def test_cleanup_moves_dists_to_new_if_not_published(self):
        # Before publication has happened, cleanUp() must salvage the
        # freshly built dists.new into the distscopy root.
        distro = self.makeDistro()
        pub_config = get_pub_config(distro)
        dists_root = get_dists_root(pub_config)
        dists_copy_root = get_distscopy_root(pub_config)
        new_distsroot = dists_root + ".new"
        os.makedirs(new_distsroot)
        self.writeMarkerFile([new_distsroot, "marker"], "dists.new")
        os.makedirs(dists_copy_root)

        script = self.makeScript(distro)
        script.setUp()
        script.cleanUp()
        self.assertEqual(
            "dists.new",
            self.readMarkerFile([dists_copy_root, "dists", "marker"]))

    def test_cleanup_moves_dists_to_old_if_published(self):
        # After publication (done_pub set), cleanUp() must instead
        # retire the superseded dists.old into the distscopy root.
        distro = self.makeDistro()
        pub_config = get_pub_config(distro)
        dists_root = get_dists_root(pub_config)
        old_distsroot = dists_root + ".old"
        dists_copy_root = get_distscopy_root(pub_config)
        os.makedirs(old_distsroot)
        self.writeMarkerFile([old_distsroot, "marker"], "dists.old")
        os.makedirs(dists_copy_root)

        script = self.makeScript(distro)
        script.setUp()
        script.done_pub = True
        script.cleanUp()
        self.assertEqual(
            "dists.old",
            self.readMarkerFile([dists_copy_root, "dists", "marker"]))
+
    def test_getDirtySuites_returns_suite_with_pending_publication(self):
        spph = self.factory.makeSourcePackagePublishingHistory()
        script = self.makeScript(spph.distroseries.distribution)
        script.setUp()
        self.assertEqual([name_spph_suite(spph)], script.getDirtySuites())

    def test_getDirtySuites_returns_suites_with_pending_publications(self):
        # One pending publication in each of two series: both suites
        # must be reported dirty.
        distro = self.makeDistro()
        spphs = [
            self.factory.makeSourcePackagePublishingHistory(
                distroseries=self.factory.makeDistroSeries(
                    distribution=distro))
            for counter in xrange(2)]

        script = self.makeScript(distro)
        script.setUp()
        self.assertContentEqual(
            [name_spph_suite(spph) for spph in spphs],
            script.getDirtySuites())

    def test_getDirtySuites_ignores_suites_without_pending_publications(self):
        # An already-PUBLISHED record does not make its suite dirty.
        spph = self.factory.makeSourcePackagePublishingHistory(
            status=PackagePublishingStatus.PUBLISHED)
        script = self.makeScript(spph.distroseries.distribution)
        script.setUp()
        self.assertEqual([], script.getDirtySuites())

    def test_getDirtySecuritySuites_returns_security_suites(self):
        distro = self.makeDistro()
        spphs = [
            self.factory.makeSourcePackagePublishingHistory(
                distroseries=self.factory.makeDistroSeries(
                    distribution=distro),
                pocket=PackagePublishingPocket.SECURITY)
            for counter in xrange(2)]

        script = self.makeScript(distro)
        script.setUp()
        self.assertContentEqual(
            [name_spph_suite(spph) for spph in spphs],
            script.getDirtySecuritySuites())

    def test_getDirtySecuritySuites_ignores_non_security_suites(self):
        # Pending publications in every non-SECURITY pocket: none of
        # them may show up as a dirty security suite.
        distroseries = self.factory.makeDistroSeries()
        spphs = [
            self.factory.makeSourcePackagePublishingHistory(
                distroseries=distroseries, pocket=pocket)
            for pocket in [
                PackagePublishingPocket.RELEASE,
                PackagePublishingPocket.UPDATES,
                PackagePublishingPocket.PROPOSED,
                PackagePublishingPocket.BACKPORTS,
                ]]
        script = self.makeScript(distroseries.distribution)
        script.setUp()
        self.assertEqual([], script.getDirtySecuritySuites())
+
    def test_rsync_copies_files(self):
        # rsyncNewDists copies the current dists content into dists.new.
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        dists_root = get_dists_root(get_pub_config(distro))
        os.makedirs(dists_root)
        os.makedirs(dists_root + ".new")
        self.writeMarkerFile([dists_root, "new-file"], "New file")
        script.rsyncNewDists(ArchivePurpose.PRIMARY)
        self.assertEqual(
            "New file",
            self.readMarkerFile([dists_root + ".new", "new-file"]))

    def test_rsync_cleans_up_obsolete_files(self):
        # Files already in dists.new that no longer exist in dists are
        # deleted by the sync (rsync --delete semantics -- presumably;
        # the flag itself is set in the implementation above this view).
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        dists_root = get_dists_root(get_pub_config(distro))
        os.makedirs(dists_root)
        os.makedirs(dists_root + ".new")
        old_file = [dists_root + ".new", "old-file"]
        self.writeMarkerFile(old_file, "old-file")
        script.rsyncNewDists(ArchivePurpose.PRIMARY)
        self.assertFalse(path_exists(*old_file))

    def test_setUpDirs_creates_directory_structure(self):
        distro = self.makeDistro()
        pub_config = get_pub_config(distro)
        archive_root = get_archive_root(pub_config)
        dists_root = get_dists_root(pub_config)
        script = self.makeScript(distro)
        script.setUp()

        self.assertFalse(file_exists(archive_root))

        script.setUpDirs()

        self.assertTrue(file_exists(archive_root))
        self.assertTrue(file_exists(dists_root))
        self.assertTrue(file_exists(dists_root + ".new"))

    def test_setUpDirs_does_not_mind_if_directories_already_exist(self):
        # setUpDirs is idempotent.
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        script.setUpDirs()
        self.assertTrue(file_exists(get_archive_root(get_pub_config(distro))))

    def test_setUpDirs_moves_dists_to_dists_new(self):
        # A second setUpDirs run carries existing dists content over
        # into dists.new.
        distro = self.makeDistro()
        dists_root = get_dists_root(get_pub_config(distro))
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        self.writeMarkerFile([dists_root, "marker"], "X")
        script.setUpDirs()
        self.assertEqual(
            "X", self.readMarkerFile([dists_root + ".new", "marker"]))
+
+    def test_publishDistroArchive_runs_parts(self):
+        distro = self.makeDistro()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.runParts = FakeMethod()
+        script.publishDistroArchive(distro.main_archive)
+        self.assertEqual(1, script.runParts.call_count)
+        args, kwargs = script.runParts.calls[0]
+        parts_dir, env = args
+        self.assertEqual("publish-distro.d", parts_dir)
+
+    def test_runPublishDistroParts_passes_parameters(self):
+        distro = self.makeDistro()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.runParts = FakeMethod()
+        script.runPublishDistroParts(distro.main_archive)
+        args, kwargs = script.runParts.calls[0]
+        parts_dir, env = args
+        required_parameters = set(["DISTSROOT", "ARCHIVEROOT"])
+        missing_parameters = set(env.keys()).difference(required_parameters)
+        self.assertEqual(set(), missing_parameters)
+
    def test_installDists_sets_done_pub(self):
        # done_pub flips from False to True once installation succeeds;
        # cleanUp() uses it to pick its recovery strategy.
        script = self.makeScript()
        script.setUp()
        script.setUpDirs()
        self.assertFalse(script.done_pub)
        script.installDists()
        self.assertTrue(script.done_pub)

    def test_installDists_replaces_distsroot(self):
        # installDists promotes dists.new to dists, and retires the old
        # dists into the distscopy root.
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        pub_config = get_pub_config(distro)
        dists_root = get_dists_root(pub_config)

        self.writeMarkerFile([dists_root, "marker"], "old")
        self.writeMarkerFile([dists_root + ".new", "marker"], "new")

        script.installDists()

        self.assertEqual("new", self.readMarkerFile([dists_root, "marker"]))
        self.assertEqual("old", self.readMarkerFile(
            [get_distscopy_root(pub_config), "dists", "marker"]))
+
    def test_runCommercialCompat_runs_commercial_compat_script(self):
        # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
        # runCommercialCompat as soon as Dapper support ends.
        # Enabled + Ubuntu: exactly one shell invocation of the script.
        self.enableCommercialCompat()
        script = self.makeScript(self.prepareUbuntu())
        script.setUp()
        script.executeShell = FakeMethod()
        script.runCommercialCompat()
        self.assertEqual(1, script.executeShell.call_count)
        args, kwargs = script.executeShell.calls[0]
        command_line, = args
        self.assertIn("commercial-compat.sh", command_line)

    def test_runCommercialCompat_runs_only_for_ubuntu(self):
        # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
        # runCommercialCompat as soon as Dapper support ends.
        # Enabled but not Ubuntu: no shell invocation.
        self.enableCommercialCompat()
        script = self.makeScript(self.makeDistro())
        script.setUp()
        script.executeShell = FakeMethod()
        script.runCommercialCompat()
        self.assertEqual(0, script.executeShell.call_count)

    def test_runCommercialCompat_runs_only_if_configured(self):
        # XXX JeroenVermeulen 2011-03-29 bug=741683: Retire
        # runCommercialCompat as soon as Dapper support ends.
        # Ubuntu but not enabled in config: no shell invocation.
        script = self.makeScript(self.prepareUbuntu())
        script.setUp()
        script.executeShell = FakeMethod()
        script.runCommercialCompat()
        self.assertEqual(0, script.executeShell.call_count)
+
+    def test_generateListings_writes_ls_lR_gz(self):
+        distro = self.makeDistro()
+        script = self.makeScript(distro)
+        script.setUp()
+        script.setUpDirs()
+        script.generateListings()
+        pass
+
    def test_clearEmptyDirs_cleans_up_empty_directories(self):
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        empty_dir = os.path.join(
            get_dists_root(get_pub_config(distro)), 'empty-dir')
        os.makedirs(empty_dir)
        script.clearEmptyDirs()
        self.assertFalse(file_exists(empty_dir))

    def test_clearEmptyDirs_does_not_clean_up_nonempty_directories(self):
        # A directory containing any file must survive the cleanup.
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        nonempty_dir = os.path.join(
            get_dists_root(get_pub_config(distro)), 'nonempty-dir')
        os.makedirs(nonempty_dir)
        self.writeMarkerFile([nonempty_dir, "placeholder"], "Data here!")
        script.clearEmptyDirs()
        self.assertTrue(file_exists(nonempty_dir))
+
    def test_processOptions_finds_distribution(self):
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.processOptions()
        self.assertEqual(distro.name, script.options.distribution)
        self.assertEqual(distro, script.distribution)

    def test_processOptions_complains_about_unknown_distribution(self):
        # A name that matches no distribution raises, rather than
        # leaving self.distribution unset.
        script = self.makeScript()
        script.options.distribution = self.factory.getUniqueString()
        self.assertRaises(LaunchpadScriptFailure, script.processOptions)
+
    def test_runParts_runs_parts(self):
        self.enableRunParts()
        script = self.makeScript(self.prepareUbuntu())
        script.setUp()
        script.executeShell = FakeMethod()
        script.runParts("finalize.d", {})
        self.assertEqual(1, script.executeShell.call_count)
        args, kwargs = script.executeShell.calls[-1]
        command_line, = args
        # The command must invoke run-parts on Ubuntu's finalize.d.
        self.assertIn("run-parts", command_line)
        self.assertIn(
            "cronscripts/publishing/distro-parts/ubuntu/finalize.d",
            command_line)

    def test_runParts_passes_parameters(self):
        # Environment variables given to runParts appear as KEY=value
        # on the generated command line.
        self.enableRunParts()
        script = self.makeScript(self.prepareUbuntu())
        script.setUp()
        script.executeShell = FakeMethod()
        key = self.factory.getUniqueString()
        value = self.factory.getUniqueString()
        script.runParts("finalize.d", {key: value})
        args, kwargs = script.executeShell.calls[-1]
        command_line, = args
        self.assertIn("%s=%s" % (key, value), command_line)
+
    def test_executeShell_executes_shell_command(self):
        distro = self.makeDistro()
        script = self.makeScript(distro)
        marker = os.path.join(
            get_pub_config(distro).root_dir, "marker")
        script.executeShell("touch %s" % marker)
        self.assertTrue(file_exists(marker))

    def test_executeShell_reports_failure_if_requested(self):
        # When a `failure` exception is supplied, a nonzero exit status
        # raises exactly that exception.
        distro = self.makeDistro()
        script = self.makeScript(distro)

        class ArbitraryFailure(Exception):
            """Some exception that's not likely to come from elsewhere."""

        self.assertRaises(
            ArbitraryFailure,
            script.executeShell, "/bin/false", failure=ArbitraryFailure())

    def test_executeShell_does_not_report_failure_if_not_requested(self):
        distro = self.makeDistro()
        script = self.makeScript(distro)
        # The test is that this does not fail:
        script.executeShell("/bin/false")
+
+    def test_runFinalizeParts_passes_parameters(self):
+        script = self.makeScript(self.prepareUbuntu())
+        script.setUp()
+        script.runParts = FakeMethod()
+        script.runFinalizeParts()
+        args, kwargs = script.runParts.calls[0]
+        parts_dir, env = args
+        required_parameters = set(["ARCHIVEROOTS", "SECURITY_UPLOAD_ONLY"])
+        missing_parameters = set(env.keys()).difference(required_parameters)
+        self.assertEqual(set(), missing_parameters)
+
    def test_publishSecurityUploads_skips_pub_if_no_security_updates(self):
        # With nothing in any SECURITY pocket, the security pass stops
        # before ever installing dists.
        script = self.makeScript(self.makeDistro())
        script.setUp()
        script.setUpDirs()
        script.installDists = FakeMethod()
        script.publishSecurityUploads()
        self.assertEqual(0, script.installDists.call_count)

    def test_publishSecurityUploads_runs_finalize_parts(self):
        # The security pass runs finalize.d with security_only=True.
        distro = self.makeDistro()
        self.factory.makeSourcePackagePublishingHistory(
            distroseries=self.factory.makeDistroSeries(distribution=distro),
            pocket=PackagePublishingPocket.SECURITY)
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        script.runFinalizeParts = FakeMethod()
        script.publishSecurityUploads()
        self.assertEqual(1, script.runFinalizeParts.call_count)
        args, kwargs = script.runFinalizeParts.calls[0]
        self.assertTrue(kwargs["security_only"])

    def test_publishAllUploads_publishes_all_distro_archives(self):
        # Every archive of the distribution -- main and partner alike --
        # gets a publishDistroArchive call.
        distro = self.makeDistro()
        distroseries = self.factory.makeDistroSeries(distribution=distro)
        partner_archive = self.factory.makeArchive(
            distribution=distro, purpose=ArchivePurpose.PARTNER)
        for archive in distro.all_distro_archives:
            self.factory.makeSourcePackagePublishingHistory(
                distroseries=distroseries,
                archive=archive)
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        script.publishDistroArchive = FakeMethod()
        script.publishAllUploads()
        published_archives = [
            args[0] for args, kwargs in script.publishDistroArchive.calls]

        self.assertContentEqual(
            distro.all_distro_archives, published_archives)
        self.assertIn(distro.main_archive, published_archives)
        self.assertIn(partner_archive, published_archives)

    def test_publishAllUploads_runs_finalize_parts(self):
        distro = self.makeDistro()
        script = self.makeScript(distro)
        script.setUp()
        script.setUpDirs()
        script.runFinalizeParts = FakeMethod()
        script.publishAllUploads()
        self.assertEqual(1, script.runFinalizeParts.call_count)

=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
--- lib/lp/archivepublisher/tests/test_publisher.py	2011-03-29 05:07:23 +0000
+++ lib/lp/archivepublisher/tests/test_publisher.py	2011-04-07 15:19:36 +0000
@@ -883,8 +883,7 @@
         self.assertEqual(
             self._getReleaseFileOrigin(release_contents), 'LP-PPA-cprov')
 
-        # XXX cprov 20090427: we should write a Release file parsing for
-        # making tests less cryptic.
+        # XXX cprov 2009-04-27 bug=440014: Use a generic parser.
         release_contents = release_contents.splitlines()
         md5_header = 'MD5Sum:'
         self.assertTrue(md5_header in release_contents)

=== modified file 'lib/lp/bugs/browser/structuralsubscription.py'
--- lib/lp/bugs/browser/structuralsubscription.py	2011-04-06 15:58:38 +0000
+++ lib/lp/bugs/browser/structuralsubscription.py	2011-04-07 15:19:36 +0000
@@ -430,10 +430,15 @@
     IJSONRequestCache(request).objects['administratedTeams'] = info
 
 
+<<<<<<< TREE
 def expose_user_subscriptions_to_js(user, subscriptions, request, target=None):
+=======
+def expose_user_subscriptions_to_js(user, subscriptions, request):
+>>>>>>> MERGE-SOURCE
     """Make the user's subscriptions available to JavaScript."""
     info = {}
     api_request = IWebServiceClientRequest(request)
+<<<<<<< TREE
     if user is None:
         administered_teams = []
     else:
@@ -451,6 +456,12 @@
             # We export nothing if the target implements no canonical URL.
             pass
 
+=======
+    if user is None:
+        administered_teams = []
+    else:
+        administered_teams = user.getAdministratedTeams()
+>>>>>>> MERGE-SOURCE
     for subscription in subscriptions:
         target = subscription.target
         record = info.get(target)
@@ -472,7 +483,9 @@
                     subscriber, rootsite='mainsite'),
                 subscriber_title=subscriber.title,
                 subscriber_is_team=is_team,
-                user_is_team_admin=user_is_team_admin,))
+                user_is_team_admin=user_is_team_admin,
+                can_mute=filter.isMuteAllowed(user),
+                is_muted=filter.muted(user) is not None))
     info = info.values()
     info.sort(key=lambda item: item['target_url'])
     IJSONRequestCache(request).objects['subscription_info'] = info

=== modified file 'lib/lp/bugs/browser/tests/test_expose.py'
--- lib/lp/bugs/browser/tests/test_expose.py	2011-04-06 15:03:54 +0000
+++ lib/lp/bugs/browser/tests/test_expose.py	2011-04-07 15:19:36 +0000
@@ -251,8 +251,10 @@
         self.assertEqual(len(target_info['filters']), 1) # One filter.
         filter_info = target_info['filters'][0]
         self.assertEqual(filter_info['filter'], sub.bug_filters[0])
-        self.failUnless(filter_info['subscriber_is_team'])
-        self.failUnless(filter_info['user_is_team_admin'])
+        self.assertTrue(filter_info['subscriber_is_team'])
+        self.assertTrue(filter_info['user_is_team_admin'])
+        self.assertTrue(filter_info['can_mute'])
+        self.assertFalse(filter_info['is_muted'])
         self.assertEqual(filter_info['subscriber_title'], team.title)
         self.assertEqual(
             filter_info['subscriber_link'],
@@ -273,8 +275,10 @@
         expose_user_subscriptions_to_js(user, [sub], request)
         info = IJSONRequestCache(request).objects['subscription_info']
         filter_info = info[0]['filters'][0]
-        self.failUnless(filter_info['subscriber_is_team'])
-        self.failIf(filter_info['user_is_team_admin'])
+        self.assertTrue(filter_info['subscriber_is_team'])
+        self.assertFalse(filter_info['user_is_team_admin'])
+        self.assertTrue(filter_info['can_mute'])
+        self.assertFalse(filter_info['is_muted'])
         self.assertEqual(filter_info['subscriber_title'], team.title)
         self.assertEqual(
             filter_info['subscriber_link'],
@@ -283,6 +287,21 @@
             filter_info['subscriber_url'],
             canonical_url(team, rootsite='mainsite'))
 
+    def test_muted_team_member_subscription(self):
+        # Show that a muted team subscription is correctly represented.
+        user = self.factory.makePerson()
+        target = self.factory.makeProduct()
+        request = LaunchpadTestRequest()
+        team = self.factory.makeTeam(members=[user])
+        with person_logged_in(team.teamowner):
+            sub = target.addBugSubscription(team, team.teamowner)
+        sub.bug_filters.one().mute(user)
+        expose_user_subscriptions_to_js(user, [sub], request)
+        info = IJSONRequestCache(request).objects['subscription_info']
+        filter_info = info[0]['filters'][0]
+        self.assertTrue(filter_info['can_mute'])
+        self.assertTrue(filter_info['is_muted'])
+
     def test_self_subscription(self):
         # Make a subscription directly for the user and see what we record.
         user = self.factory.makePerson()
@@ -293,8 +312,10 @@
         expose_user_subscriptions_to_js(user, [sub], request)
         info = IJSONRequestCache(request).objects['subscription_info']
         filter_info = info[0]['filters'][0]
-        self.failIf(filter_info['subscriber_is_team'])
+        self.assertFalse(filter_info['subscriber_is_team'])
         self.assertEqual(filter_info['subscriber_title'], user.title)
+        self.assertFalse(filter_info['can_mute'])
+        self.assertFalse(filter_info['is_muted'])
         self.assertEqual(
             filter_info['subscriber_link'],
             absoluteURL(user, IWebServiceClientRequest(request)))

=== modified file 'lib/lp/bugs/configure.zcml'
--- lib/lp/bugs/configure.zcml	2011-03-30 13:33:35 +0000
+++ lib/lp/bugs/configure.zcml	2011-04-07 15:19:36 +0000
@@ -636,12 +636,25 @@
         class=".model.bugsubscriptionfilter.BugSubscriptionFilter">
       <allow
           interface=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterAttributes"/>
+      <allow
+          interface=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterMethodsPublic"/>
       <require
           permission="launchpad.Edit"
-          interface=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterMethods"
+          interface=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterMethodsProtected"
           set_schema=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterAttributes" />
     </class>
 
+    <!-- BugSubscriptionFilterMute -->
+    <class
+        class=".model.bugsubscriptionfilter.BugSubscriptionFilterMute">
+      <allow
+          interface=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterMute"/>
+      <require
+          permission="launchpad.Edit"
+          set_schema=".interfaces.bugsubscriptionfilter.IBugSubscriptionFilterMute" />
+    </class>
+
+
     <!-- BugSubscriptionInfo -->
 
     <class class=".model.bug.BugSubscriptionInfo">

=== modified file 'lib/lp/bugs/interfaces/bugsubscriptionfilter.py'
--- lib/lp/bugs/interfaces/bugsubscriptionfilter.py	2011-03-23 15:55:44 +0000
+++ lib/lp/bugs/interfaces/bugsubscriptionfilter.py	2011-04-07 15:19:36 +0000
@@ -6,19 +6,25 @@
 __metaclass__ = type
 __all__ = [
     "IBugSubscriptionFilter",
+    "IBugSubscriptionFilterMute",
     ]
 
-
 from lazr.restful.declarations import (
+    call_with,
     export_as_webservice_entry,
     export_destructor_operation,
+    export_read_operation,
+    export_write_operation,
     exported,
+    operation_for_version,
+    REQUEST_USER,
     )
 from lazr.restful.fields import Reference
 from zope.interface import Interface
 from zope.schema import (
     Bool,
     Choice,
+    Datetime,
     FrozenSet,
     Int,
     Text,
@@ -33,7 +39,10 @@
 from lp.bugs.interfaces.structuralsubscription import (
     IStructuralSubscription,
     )
-from lp.services.fields import SearchTag
+from lp.services.fields import (
+    PersonChoice,
+    SearchTag,
+    )
 
 
 class IBugSubscriptionFilterAttributes(Interface):
@@ -94,8 +103,37 @@
             value_type=SearchTag()))
 
 
-class IBugSubscriptionFilterMethods(Interface):
-    """Methods of `IBugSubscriptionFilter`."""
+class IBugSubscriptionFilterMethodsPublic(Interface):
+    """Methods on `IBugSubscriptionFilter` that can be called by anyone."""
+
+    @call_with(person=REQUEST_USER)
+    @export_read_operation()
+    @operation_for_version('devel')
+    def isMuteAllowed(person):
+        """Return True if this filter can be muted for `person`."""
+
+    @call_with(person=REQUEST_USER)
+    @export_read_operation()
+    @operation_for_version('devel')
+    def muted(person):
+        """Return date muted if this filter was muted for `person`, or None.
+        """
+
+    @call_with(person=REQUEST_USER)
+    @export_write_operation()
+    @operation_for_version('devel')
+    def mute(person):
+        """Add a mute for `person` to this filter."""
+
+    @call_with(person=REQUEST_USER)
+    @export_write_operation()
+    @operation_for_version('devel')
+    def unmute(person):
+        """Remove any mute for `person` to this filter."""
+
+
+class IBugSubscriptionFilterMethodsProtected(Interface):
+    """Methods of `IBugSubscriptionFilter` that require launchpad.Edit."""
 
     @export_destructor_operation()
     def delete():
@@ -106,6 +144,22 @@
 
 
 class IBugSubscriptionFilter(
-    IBugSubscriptionFilterAttributes, IBugSubscriptionFilterMethods):
+    IBugSubscriptionFilterAttributes, IBugSubscriptionFilterMethodsProtected,
+    IBugSubscriptionFilterMethodsPublic):
     """A bug subscription filter."""
     export_as_webservice_entry()
+
+
+class IBugSubscriptionFilterMute(Interface):
+    """A mute on an IBugSubscriptionFilter."""
+
+    person = PersonChoice(
+        title=_('Person'), required=True, vocabulary='ValidPersonOrTeam',
+        readonly=True, description=_("The person subscribed."))
+    filter = Reference(
+        IBugSubscriptionFilter, title=_("Subscription filter"),
+        required=True, readonly=True,
+        description=_("The subscription filter to be muted."))
+    date_created = Datetime(
+        title=_("The date on which the mute was created."), required=False,
+        readonly=True)

=== modified file 'lib/lp/bugs/model/bugsubscriptionfilter.py'
--- lib/lp/bugs/model/bugsubscriptionfilter.py	2011-03-30 15:20:46 +0000
+++ lib/lp/bugs/model/bugsubscriptionfilter.py	2011-04-07 15:19:36 +0000
@@ -4,12 +4,18 @@
 # pylint: disable-msg=E0611,W0212
 
 __metaclass__ = type
-__all__ = ['BugSubscriptionFilter']
+__all__ = [
+    'BugSubscriptionFilter',
+    'BugSubscriptionFilterMute',
+    ]
+
+import pytz
 
 from itertools import chain
 
 from storm.locals import (
     Bool,
+    DateTime,
     Int,
     Reference,
     SQL,
@@ -18,12 +24,16 @@
     )
 from zope.interface import implements
 
+from canonical.database.constants import UTC_NOW
 from canonical.database.enumcol import DBEnum
 from canonical.database.sqlbase import sqlvalues
 from canonical.launchpad import searchbuilder
 from canonical.launchpad.interfaces.lpstorm import IStore
 from lp.bugs.enum import BugNotificationLevel
-from lp.bugs.interfaces.bugsubscriptionfilter import IBugSubscriptionFilter
+from lp.bugs.interfaces.bugsubscriptionfilter import (
+    IBugSubscriptionFilter,
+    IBugSubscriptionFilterMute,
+    )
 from lp.bugs.interfaces.bugtask import (
     BugTaskImportance,
     BugTaskStatus,
@@ -35,9 +45,14 @@
     BugSubscriptionFilterStatus,
     )
 from lp.bugs.model.bugsubscriptionfiltertag import BugSubscriptionFilterTag
+from lp.registry.interfaces.person import validate_person
 from lp.services.database.stormbase import StormBase
 
 
+class MuteNotAllowed(Exception):
+    """Raised when someone tries to mute a filter that can't be muted."""
+
+
 class BugSubscriptionFilter(StormBase):
     """A filter to specialize a *structural* subscription."""
 
@@ -237,3 +252,64 @@
             # There are no other filters.  We can delete the parent
             # subscription.
             self.structural_subscription.delete()
+
+    def isMuteAllowed(self, person):
+        """See `IBugSubscriptionFilter`."""
+        return (
+            self.structural_subscription.subscriber.isTeam() and
+            person.inTeam(self.structural_subscription.subscriber))
+
+    def muted(self, person):
+        store = Store.of(self)
+        existing_mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter_id == self.id,
+            BugSubscriptionFilterMute.person_id == person.id)
+        if not existing_mutes.is_empty():
+            return existing_mutes.one().date_created
+
+    def mute(self, person):
+        """See `IBugSubscriptionFilter`."""
+        if not self.isMuteAllowed(person):
+            raise MuteNotAllowed(
+                "This subscription cannot be muted for %s" % person.name)
+
+        store = Store.of(self)
+        existing_mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter_id == self.id,
+            BugSubscriptionFilterMute.person_id == person.id)
+        if existing_mutes.is_empty():
+            mute = BugSubscriptionFilterMute()
+            mute.person = person
+            mute.filter_id = self.id
+            store.add(mute)
+
+    def unmute(self, person):
+        """See `IBugSubscriptionFilter`."""
+        store = Store.of(self)
+        existing_mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter_id == self.id,
+            BugSubscriptionFilterMute.person_id == person.id)
+        existing_mutes.remove()
+
+
+class BugSubscriptionFilterMute(StormBase):
+    """A filter to specialize a *structural* subscription."""
+
+    implements(IBugSubscriptionFilterMute)
+
+    __storm_table__ = "BugSubscriptionFilterMute"
+
+    person_id = Int("person", allow_none=False, validator=validate_person)
+    person = Reference(person_id, "Person.id")
+
+    filter_id = Int("filter", allow_none=False)
+    filter = Reference(filter_id, "StructuralSubscription.id")
+
+    __storm_primary__ = 'person_id', 'filter_id'
+
+    date_created = DateTime(
+        "date_created", allow_none=False, default=UTC_NOW,
+        tzinfo=pytz.UTC)

=== modified file 'lib/lp/bugs/tests/test_structuralsubscription.py'
--- lib/lp/bugs/tests/test_structuralsubscription.py	2011-03-21 18:23:31 +0000
+++ lib/lp/bugs/tests/test_structuralsubscription.py	2011-04-07 15:19:36 +0000
@@ -23,7 +23,11 @@
     BugTaskStatus,
     )
 from lp.bugs.mail.bugnotificationrecipients import BugNotificationRecipients
-from lp.bugs.model.bugsubscriptionfilter import BugSubscriptionFilter
+from lp.bugs.model.bugsubscriptionfilter import (
+    BugSubscriptionFilter,
+    BugSubscriptionFilterMute,
+    MuteNotAllowed,
+    )
 from lp.bugs.model.structuralsubscription import (
     get_structural_subscriptions_for_bug,
     get_structural_subscribers,
@@ -680,3 +684,113 @@
             [], list(
                 get_structural_subscribers(
                     bug, None, BugNotificationLevel.COMMENTS, None)))
+
+
+class TestBugSubscriptionFilterMute(TestCaseWithFactory):
+    """Tests for the BugSubscriptionFilterMute class."""
+
+    layer = DatabaseFunctionalLayer
+
+    def setUp(self):
+        super(TestBugSubscriptionFilterMute, self).setUp()
+        self.target = self.factory.makeProduct()
+        self.team = self.factory.makeTeam()
+        self.team_member = self.factory.makePerson()
+        with person_logged_in(self.team.teamowner):
+            self.team.addMember(self.team_member, self.team.teamowner)
+            self.team_subscription = self.target.addBugSubscription(
+                self.team, self.team.teamowner)
+            self.filter = self.team_subscription.bug_filters.one()
+
+    def test_isMuteAllowed_returns_true_for_team_subscriptions(self):
+        # BugSubscriptionFilter.isMuteAllowed() will return True for
+        # subscriptions where the owner of the subscription is a team.
+        self.assertTrue(self.filter.isMuteAllowed(self.team_member))
+
+    def test_isMuteAllowed_returns_false_for_non_team_subscriptions(self):
+        # BugSubscriptionFilter.isMuteAllowed() will return False for
+        # subscriptions where the owner of the subscription is not a team.
+        person = self.factory.makePerson()
+        with person_logged_in(person):
+            non_team_subscription = self.target.addBugSubscription(
+                person, person)
+        filter = non_team_subscription.bug_filters.one()
+        self.assertFalse(filter.isMuteAllowed(person))
+
+    def test_isMuteAllowed_returns_false_for_non_team_members(self):
+        # BugSubscriptionFilter.isMuteAllowed() will return False if the
+        # user passed to it is not a member of the subscribing team.
+        non_team_person = self.factory.makePerson()
+        self.assertFalse(self.filter.isMuteAllowed(non_team_person))
+
+    def test_mute_adds_mute(self):
+        # BugSubscriptionFilter.mute() adds a mute for the filter.
+        filter_id = self.filter.id
+        person_id = self.team_member.id
+        store = Store.of(self.filter)
+        mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter == filter_id,
+            BugSubscriptionFilterMute.person == person_id)
+        self.assertTrue(mutes.is_empty())
+        self.assertFalse(self.filter.muted(self.team_member))
+        self.filter.mute(self.team_member)
+        self.assertTrue(self.filter.muted(self.team_member))
+        store.flush()
+        self.assertFalse(mutes.is_empty())
+
+    def test_unmute_removes_mute(self):
+        # BugSubscriptionFilter.unmute() removes any mute for a given
+        # person on that filter.
+        filter_id = self.filter.id
+        person_id = self.team_member.id
+        store = Store.of(self.filter)
+        self.filter.mute(self.team_member)
+        store.flush()
+        mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter == filter_id,
+            BugSubscriptionFilterMute.person == person_id)
+        self.assertFalse(mutes.is_empty())
+        self.assertTrue(self.filter.muted(self.team_member))
+        self.filter.unmute(self.team_member)
+        self.assertFalse(self.filter.muted(self.team_member))
+        store.flush()
+        self.assertTrue(mutes.is_empty())
+
+    def test_mute_is_idempotent(self):
+        # Muting works even if the user is already muted.
+        store = Store.of(self.filter)
+        mute = self.filter.mute(self.team_member)
+        store.flush()
+        second_mute = self.filter.mute(self.team_member)
+        self.assertEqual(mute, second_mute)
+
+    def test_unmute_is_idempotent(self):
+        # Unmuting works even if the user is not muted.
+        store = Store.of(self.filter)
+        mutes = store.find(
+            BugSubscriptionFilterMute,
+            BugSubscriptionFilterMute.filter == self.filter.id,
+            BugSubscriptionFilterMute.person == self.team_member.id)
+        self.assertTrue(mutes.is_empty())
+        self.filter.unmute(self.team_member)
+        self.assertTrue(mutes.is_empty())
+
+    def test_mute_raises_error_for_non_team_subscriptions(self):
+        # BugSubscriptionFilter.mute() will raise an error if called on
+        # a non-team subscription.
+        person = self.factory.makePerson()
+        with person_logged_in(person):
+            non_team_subscription = self.target.addBugSubscription(
+                person, person)
+        filter = non_team_subscription.bug_filters.one()
+        self.assertFalse(filter.isMuteAllowed(person))
+        self.assertRaises(MuteNotAllowed, filter.mute, person)
+
+    def test_mute_raises_error_for_non_team_members(self):
+        # BugSubscriptionFilter.mute() will raise an error if called on
+        # a subscription of which the calling person is not a member.
+        non_team_person = self.factory.makePerson()
+        self.assertFalse(self.filter.isMuteAllowed(non_team_person))
+        self.assertRaises(MuteNotAllowed, self.filter.mute, non_team_person)

=== modified file 'lib/lp/registry/browser/configure.zcml'
--- lib/lp/registry/browser/configure.zcml	2011-04-06 22:51:53 +0000
+++ lib/lp/registry/browser/configure.zcml	2011-04-07 15:19:36 +0000
@@ -1976,8 +1976,13 @@
         for="lp.registry.interfaces.distribution.IDerivativeDistribution"
         class="lp.registry.browser.distroseries.DistroSeriesAddView"
         facet="overview"
+<<<<<<< TREE
         permission="launchpad.Moderate"
         template="../templates/distroseries-add.pt">
+=======
+        permission="launchpad.Append"
+        template="../templates/distroseries-add.pt">
+>>>>>>> MERGE-SOURCE
     </browser:page>
     <browser:page
         name="+initseries"

=== modified file 'lib/lp/registry/browser/distroseries.py'
=== modified file 'lib/lp/registry/browser/distroseriesdifference.py'
--- lib/lp/registry/browser/distroseriesdifference.py	2011-04-06 07:34:18 +0000
+++ lib/lp/registry/browser/distroseriesdifference.py	2011-04-07 15:19:36 +0000
@@ -10,6 +10,7 @@
     ]
 
 from lazr.restful.interfaces import IWebServiceClientRequest
+from storm.zope.interfaces import IResultSet
 from z3c.ptcompat import ViewPageTemplateFile
 from zope.app.form.browser.itemswidgets import RadioWidget
 from zope.component import (
@@ -27,17 +28,23 @@
     )
 
 from canonical.launchpad.webapp import (
+    canonical_url,
     LaunchpadView,
     Navigation,
     stepthrough,
     )
 from canonical.launchpad.webapp.authorization import check_permission
 from canonical.launchpad.webapp.launchpadform import custom_widget
+<<<<<<< TREE
 from lp.app.browser.launchpadform import LaunchpadFormView
 from lp.registry.enum import (
     DistroSeriesDifferenceStatus,
     DistroSeriesDifferenceType,
     )
+=======
+from lp.app.browser.launchpadform import LaunchpadFormView
+from lp.registry.enum import DistroSeriesDifferenceStatus
+>>>>>>> MERGE-SOURCE
 from lp.registry.interfaces.distroseriesdifference import (
     IDistroSeriesDifference,
     )
@@ -52,6 +59,10 @@
     IComment,
     IConversation,
     )
+from lp.soyuz.enums import PackagePublishingStatus
+from lp.soyuz.model.distroseriessourcepackagerelease import (
+    DistroSeriesSourcePackageRelease,
+    )
 
 
 class DistroSeriesDifferenceNavigation(Navigation):
@@ -68,6 +79,7 @@
             IDistroSeriesDifferenceCommentSource).getForDifference(
                 self.context, id)
 
+<<<<<<< TREE
     @property
     def parent_packagesets_names(self):
         """Return the formatted list of packagesets for the related
@@ -91,6 +103,37 @@
         else:
             return None
 
+=======
+    @property
+    def parent_source_package_url(self):
+        return self._package_url(
+            self.context.derived_series.parent_series,
+            self.context.parent_source_version)
+
+    @property
+    def source_package_url(self):
+        return self._package_url(
+            self.context.derived_series,
+            self.context.source_version)
+
+    def _package_url(self, distro_series, version):
+        pubs = distro_series.main_archive.getPublishedSources(
+            name=self.context.source_package_name.name,
+            version=version,
+            status=PackagePublishingStatus.PUBLISHED,
+            distroseries=distro_series,
+            exact_match=True)
+
+        # There is only one or zero published package.
+        pub = IResultSet(pubs).one()
+        if pub is None:
+            return None
+        else:
+            return canonical_url(
+                DistroSeriesSourcePackageRelease(
+                    distro_series, pub.sourcepackagerelease))
+
+>>>>>>> MERGE-SOURCE
 
 class IDistroSeriesDifferenceForm(Interface):
     """An interface used in the browser only for displaying form elements."""

=== modified file 'lib/lp/registry/browser/tests/distroseries-views.txt'
=== modified file 'lib/lp/registry/browser/tests/test_series_views.py'
=== modified file 'lib/lp/registry/javascript/distroseriesdifferences_details.js'
=== modified file 'lib/lp/registry/javascript/structural-subscription.js'
--- lib/lp/registry/javascript/structural-subscription.js	2011-04-07 02:14:43 +0000
+++ lib/lp/registry/javascript/structural-subscription.js	2011-04-07 15:19:36 +0000
@@ -208,6 +208,7 @@
  * @method make_add_subscription_handler
  * @param {Object} success_callback Function to execute on successful addition.
  */
+<<<<<<< TREE
 function make_add_subscription_handler(success_callback) {
     var save_subscription = function(form_data) {
         var who;
@@ -225,6 +226,22 @@
         return add_bug_filter(who, form_data, success_callback);
     };
     return save_subscription;
+=======
+
+function save_subscription(form_data) {
+    var who;
+    var has_errors = check_for_errors_in_overlay(add_subscription_overlay);
+    if (has_errors) {
+        return false;
+    }
+    if (form_data.recipient[0] === 'user') {
+        who = LP.links.me;
+    } else {
+        // There can be only one.
+        who = form_data.team[0];
+    }
+    add_bug_filter(who, form_data);
+>>>>>>> MERGE-SOURCE
 }
 namespace._make_add_subscription_handler = make_add_subscription_handler;
 
@@ -252,6 +269,7 @@
     } else {
         return false;
     }
+<<<<<<< TREE
 }
 
 /**
@@ -265,21 +283,39 @@
         .empty()
         .appendChild(render_filter_title(filter_info, filter));
 };
+=======
+}
+>>>>>>> MERGE-SOURCE
 
 /**
  * Handle the activation of the edit subscription link.
  */
 function edit_subscription_handler(context, form_data) {
     var has_errors = check_for_errors_in_overlay(add_subscription_overlay);
+<<<<<<< TREE
     var filter_id = '#filter-description-'+context.filter_id.toString();
     if (has_errors) {
+=======
+    var filter_node = Y.one(
+        '#subscription-filter-'+context.filter_id.toString());
+    if (has_errors) {
+>>>>>>> MERGE-SOURCE
         return false;
     }
     var on = {success: function (new_data) {
         var description_node = Y.one(filter_id);
         var filter = new_data.getAttrs();
+<<<<<<< TREE
         fill_filter_description(
             description_node, context.filter_info, filter);
+=======
+        filter_node.one('.filter-description')
+            .empty()
+            .appendChild(create_filter_description(filter));
+        filter_node.one('.filter-name')
+            .empty()
+            .appendChild(render_filter_title(context.filter_info, filter));
+>>>>>>> MERGE-SOURCE
         add_subscription_overlay.hide();
     }};
     patch_bug_filter(context.filter_info.filter, form_data, on);
@@ -660,6 +696,7 @@
  */
 function setup_overlay(content_box_id, hide_recipient_picker) {
     var content_node = Y.one(content_box_id);
+<<<<<<< TREE
     if (!Y.Lang.isValue(content_node)) {
         Y.error("Node not found: " + content_box_id);
     }
@@ -736,6 +773,81 @@
     add_recipient_picker(content_node, hide_recipient_picker);
 
     var accordion = create_accordion('#accordion-overlay', content_node);
+=======
+    var container = Y.Node.create(
+        '<div id="overlay-container"><dl>' +
+        '    <dt>Bug mail recipient</dt>' +
+        '    <dd id="bug-mail-recipient">' +
+        '    </dd>' +
+        '  <dt>Subscription name</dt>' +
+        '  <dd>' +
+        '    <input type="text" name="name">' +
+        '    <a target="help" class="sprite maybe"' +
+        '          href="/+help/structural-subscription-name.html">&nbsp;' +
+        '      <span class="invisible-link">Structural subscription' +
+        '        description help</span></a> ' +
+        '  </dd>' +
+        '  <dt>Receive mail for bugs affecting' +
+        '    <span id="structural-subscription-context-title"></span> '+
+        '    that</dt>' +
+        '  <dd>' +
+        '    <div id="events">' +
+        '      <input type="radio" name="events"' +
+        '          value="added-or-closed"' +
+        '          id="added-or-closed" checked>' +
+        '      <label for="added-or-closed">are added or ' +
+        '        closed</label>' +
+        '      <br>' +
+        '      <input type="radio" name="events"' +
+        '          value="added-or-changed"' +
+        '          id="added-or-changed">' +
+        '      <label for="added-or-changed">are added or changed in' +
+        '        any way' +
+        '        <em id="added-or-changed-more">(more options...)</em>' +
+        '      </label>' +
+        '    </div>' +
+        '    <div id="filter-wrapper" class="ss-collapsible">' +
+        '    <dl style="margin-left:25px;">' +
+        '      <dt></dt>' +
+        '      <dd>' +
+        '        <input type="checkbox" name="filters"' +
+        '            value="filter-comments"' +
+        '            id="filter-comments">' +
+        '        <label for="filter-comments">Don\'t send mail about' +
+        '          comments</label><br>' +
+        '        <input type="checkbox" name="filters"' +
+        '            value="advanced-filter"' +
+        '            id="advanced-filter">' +
+        '        <label for="advanced-filter">Bugs must match this' +
+        '          filter <em id="advanced-filter-more">(...)</em>' +
+        '        </label><br>' +
+        '        <div id="accordion-wrapper" ' +
+        '            class="ss-collapsible">' +
+        '            <dl>' +
+        '                <dt></dt>' +
+        '                <dd style="margin-left:25px;">' +
+        '                    <div id="accordion-overlay"' +
+        '                        style="position:relative; '+
+                                        'overflow:hidden;"></div>' +
+        '                </dd>' +
+        '            </dl>' +
+        '        </div> ' +
+        '      </dd>' +
+        '    </dl>' +
+        '    </div> ' +
+        '  </dd>' +
+        '  <dt></dt>' +
+        '</dl></div>');
+
+    // Assemble some nodes and set the title.
+    content_node
+        .appendChild(container)
+            .one('#structural-subscription-context-title')
+                .set('text', LP.cache.context.title);
+    add_recipient_picker(content_node, hide_recipient_picker);
+
+    var accordion = create_accordion('#accordion-overlay', content_node);
+>>>>>>> MERGE-SOURCE
 
     // Set up click handlers for the events radio buttons.
     var radio_group = Y.all('#events input');
@@ -901,6 +1013,7 @@
 }
 
 /**
+<<<<<<< TREE
  * Sets the recipient label according to the filter on the overlay.
  * Overlay must not have a recipient picker, but a simple recipient label.
  */
@@ -1035,6 +1148,147 @@
 }
 
 /**
+=======
+ * Sets the recipient label according to the filter on the overlay.
+ * Overlay must not have a recipient picker, but a simple recipient label.
+ */
+function set_recipient_label(content_node, filter_info) {
+    var recipient_label = content_node.one('input[name="recipient"] + span'),
+        teams = LP.cache.administratedTeams;
+    if (filter_info !== undefined && filter_info.subscriber_is_team) {
+        var i;
+        for (i=0; i<teams.length; i++) {
+            if (teams[i].link === filter_info.subscriber_link){
+                recipient_label.set('text', teams[i].title);
+                break;
+            }
+        }
+    } else {
+        recipient_label.set('text', 'Yourself');
+    }
+}
+
+/**
+ * Sets filter statuses and importances on the overlay based on the filter
+ * data.
+ */
+function set_filter_statuses_and_importances(content_node, filter) {
+    var is_lifecycle = filter.bug_notification_level==='Lifecycle',
+        statuses = filter.statuses,
+        importances = filter.importances;
+    if (is_lifecycle) {
+        statuses = LP.cache.statuses;
+        importances = LP.cache.importances;
+    } else {
+        // An absence of values is equivalent to all values.
+        if (statuses.length === 0) {
+            statuses = LP.cache.statuses;
+        }
+        if (importances.length === 0) {
+            importances = LP.cache.importances;
+        }
+    }
+    set_checkboxes(content_node, LP.cache.statuses, statuses);
+    set_checkboxes(
+        content_node, LP.cache.importances, importances);
+}
+
+/**
+ * Sets filter tags and tag matching options in the overlay based on the
+ * filter data.
+ */
+function set_filter_tags(content_node, filter) {
+    var is_lifecycle = filter.bug_notification_level==='Lifecycle';
+    content_node.one('[name="tags"]').set(
+        'value', is_lifecycle ? '' : filter.tags.join(' '));
+    set_radio_buttons(
+        content_node, [MATCH_ALL, MATCH_ANY],
+        filter.find_all_tags ? MATCH_ALL : MATCH_ANY);
+}
+
+/**
+ * Sets filter notification level radio/check boxes in the overlay
+ * according to the filter data.
+ */
+function set_filter_notification_options(content_node, filter) {
+    var is_lifecycle = filter.bug_notification_level==='Lifecycle',
+        has_advanced_filters = !is_lifecycle && (
+            filter.statuses.length ||
+                filter.importances.length ||
+                filter.tags.length) > 0,
+        filters = has_advanced_filters ? [ADVANCED_FILTER] : [],
+        event = ADDED_OR_CHANGED;
+    // Chattiness: Lifecycle < Details < Discussion.
+    switch (filter.bug_notification_level) {
+        case 'Lifecycle':
+            event = ADDED_OR_CLOSED;
+            filters = [];
+            break;
+        case 'Details':
+            filters.push(FILTER_COMMENTS);
+            break;
+    }
+    // 'Discussion' case is the default and handled by the declared
+    // values in the code.
+    set_radio_buttons(
+        content_node, [ADDED_OR_CLOSED, ADDED_OR_CHANGED], event);
+    set_checkboxes(
+        content_node, [FILTER_COMMENTS, ADVANCED_FILTER], filters);
+    handle_change(ADDED_OR_CHANGED, FILTER_WRAPPER, {duration: 0});
+    handle_change(ADVANCED_FILTER, ACCORDION_WRAPPER, {duration: 0});
+}
+
+/**
+ * Loads all data from the filter into the overlay for editing.
+ */
+function load_overlay_with_filter_data(content_node, filter_info) {
+    var filter = filter_info.filter;
+    set_recipient_label(content_node, filter_info);
+    content_node.one('[name="name"]').set('value',filter.description);
+    set_filter_statuses_and_importances(content_node, filter);
+    set_filter_tags(content_node, filter);
+    set_filter_notification_options(content_node, filter);
+}
+
+/**
+ * Show an overlay for editing a subscription.
+ */
+function show_edit_overlay(config, subscription, filter_info, filter_id) {
+    Y.one(config.content_box).empty();
+    var content_node = Y.one(config.content_box),
+        overlay_id = setup_overlay(config.content_box, true),
+        submit_button = Y.Node.create(
+            '<button type="submit" name="field.actions.create" ' +
+                'value="Save Changes" class="lazr-pos lazr-btn" ' +
+                '>OK</button>');
+
+    clear_overlay(content_node, true);
+
+    var context = {
+        filter_info: filter_info,
+        filter_id: filter_id
+    };
+    create_overlay(
+        config.content_box, overlay_id, submit_button,
+        function (form_data) {
+            return edit_subscription_handler(context, form_data);});
+
+    load_overlay_with_filter_data(content_node, filter_info);
+    var title = subscription.target_title;
+    Y.one('#structural-subscription-context-title')
+        .set('text', title);
+    Y.one('#subscription-overlay-title')
+        .set('text', 'Edit subscription for '+title+' bugs');
+
+    // We need to initialize the help links.  They may have already been
+    // initialized except for the ones we added, so setupHelpTrigger
+    // is idempotent.  Notice that this is old MochiKit code.
+    forEach(findHelpLinks(), setupHelpTrigger);
+    add_subscription_overlay.show();
+}
+
+/**
+>>>>>>> MERGE-SOURCE
  * Return an edit handler for the specified filter.
  */
 function make_edit_handler(subscription, filter_info, filter_id, config) {
@@ -1049,13 +1303,24 @@
     };
 }
 
+// If set, this will be used instead of Y.io.  This is for testing.
+namespace._Y_io_hook = null;
+
+function do_io(link, config) {
+    var yio = Y.io;
+    if (namespace._Y_io_hook) {
+        yio = namespace._Y_io_hook;
+    }
+    yio(link, config);
+}
+
 /**
  * Construct a handler for an unsubscribe link.
  */
-function make_delete_handler(filter, filter_id, subscriber_id) {
+function make_delete_handler(filter, node, subscriber_id) {
     var error_handler = new Y.lp.client.ErrorHandler();
     error_handler.showError = function(error_msg) {
-      var unsubscribe_node = Y.one('#unsubscribe-'+filter_id.toString());
+      var unsubscribe_node = node.one('a.delete-subscription');
       Y.lp.app.errors.display_error(unsubscribe_node, error_msg);
     };
     return function() {
@@ -1068,6 +1333,7 @@
                         '#subscription-filter-'+filter_id.toString());
                     filter_node.setStyle("margin-top", "0");
                     var subscriber = Y.one(
+<<<<<<< TREE
                         '#subscription-'+subscriber_id.toString());
                     var filters = subscriber.all('.subscription-filter');
 
@@ -1081,6 +1347,53 @@
             }
         };
         Y.io(filter.self_link, y_config);
+=======
+                        '#subscription-'+subscriber_id.toString());
+                    var to_collapse = subscriber;
+                    var filters = subscriber.all('.subscription-filter');
+                    if (!filters.isEmpty()) {
+                        to_collapse = node;
+                    }
+                    collapse_node(to_collapse);
+                    },
+                 failure: error_handler.getFailureHandler()
+                }
+            };
+        do_io(filter.self_link, y_config);
+    };
+}
+
+/**
+ * Construct a handler for a mute link.
+ */
+function make_mute_handler(filter_info, node){
+    var error_handler = new Y.lp.client.ErrorHandler();
+    error_handler.showError = function(error_msg) {
+      var mute_node = node.one('a.mute-subscription');
+      Y.lp.app.errors.display_error(mute_node, error_msg);
+    };
+    return function() {
+        var fname;
+        if (filter_info.is_muted) {
+            fname = 'unmute';
+        } else {
+            fname = 'mute';
+        }
+        var config = {
+            on: {success: function(){
+                    if (fname === 'mute') {
+                        filter_info.is_muted = true;
+                    } else {
+                        filter_info.is_muted = false;
+                    }
+                    handle_mute(node, filter_info.is_muted);
+                    },
+                 failure: error_handler.getFailureHandler()
+                }
+            };
+        namespace.lp_client.named_post(filter_info.filter.self_link,
+            fname, config);
+>>>>>>> MERGE-SOURCE
     };
 }
 
@@ -1118,14 +1431,34 @@
         var sub = subscription_info[i];
         for (j=0; j<sub.filters.length; j++) {
             var filter_info = sub.filters[j];
+<<<<<<< TREE
             wire_up_edit_links_for_filter(
                 config, sub, i, filter_info, filter_id);
+=======
+            var node = Y.one('#subscription-filter-'+filter_id.toString());
+            if (filter_info.can_mute) {
+                var mute_link = node.one('a.mute-subscription');
+                mute_link.on('click', make_mute_handler(filter_info, node));
+            }
+            if (!filter_info.subscriber_is_team ||
+                filter_info.user_is_team_admin) {
+                var edit_link = node.one('a.edit-subscription');
+                var edit_handler = make_edit_handler(
+                    sub, filter_info, filter_id, config);
+                edit_link.on('click', edit_handler);
+                var delete_link = node.one('a.delete-subscription');
+                var delete_handler = make_delete_handler(
+                    filter_info.filter, node, i);
+                delete_link.on('click', delete_handler);
+            }
+>>>>>>> MERGE-SOURCE
             filter_id += 1;
         }
     }
 }
 
 /**
+<<<<<<< TREE
  * Create filter node to include in the subscription's filter listing.
  */
 function create_filter_node(filter_id, filter_info, filter) {
@@ -1208,6 +1541,28 @@
 }
 
 /**
+=======
+ * For a given filter node, set it up properly based on mute state.
+ */
+function handle_mute(node, muted) {
+    var control = node.one('a.mute-subscription');
+    var label = node.one('em.mute-label');
+    var description = node.one('.filter-description');
+    if (muted) {
+        control.set('text', 'Receive emails from this subscription');
+        control.replaceClass('no', 'yes');
+        label.setStyle('display', null);
+        description.setStyle('color', '#bbb');
+    } else {
+        control.set('text', 'Stop your emails from this subscription');
+        control.replaceClass('yes', 'no');
+        label.setStyle('display', 'none');
+        description.setStyle('color', null);
+    }
+}
+
+/**
+>>>>>>> MERGE-SOURCE
  * Populate the subscription list DOM element with subscription descriptions.
  */
 function fill_in_bug_subscriptions(config) {
@@ -1215,19 +1570,111 @@
 
     var listing = Y.one('#subscription-listing');
     var subscription_info = LP.cache.subscription_info;
+<<<<<<< TREE
     var top_node = Y.Node.create(
         '<div class="yui-g"><div id="structural-subscriptions"></div></div>');
     var i;
+=======
+    var top_node = Y.Node.create(
+        '<div class="yui-g"><div id="structural-subscriptions"></div></div>');
+>>>>>>> MERGE-SOURCE
     var filter_id = 0;
     for (i=0; i<subscription_info.length; i++) {
+<<<<<<< TREE
         top_node.appendChild(
             create_subscription_node(i, subscription_info[i], filter_id));
         filter_id += subscription_info[i].filters.length;
+=======
+        var sub = subscription_info[i];
+        var sub_node = top_node.appendChild(Y.Node.create(
+            '<div style="margin-top: 2em; padding: 0 1em 1em 1em; '+
+            '      border: 1px solid #ddd;"></div>')
+            .set('id', 'subscription-'+i.toString()));
+        sub_node.appendChild(Y.Node.create(
+            '  <span style="float: left; margin-top: -0.6em; '+
+            '      padding: 0 1ex; background-color: #fff;"></a>'))
+            .appendChild('<span>Subscriptions to </span>')
+                .appendChild(Y.Node.create('<a></a>')
+                    .set('href', sub.target_url)
+                    .set('text', sub.target_title));
+
+        for (j=0; j<sub.filters.length; j++) {
+            var filter = sub.filters[j].filter;
+            // We put the filters in the cache so that the patch mechanism
+            // can automatically find them and update them on a successful
+            // edit.  This makes it possible to open up a filter after an edit
+            // and see the information you expect to see.
+            LP.cache['structural-subscription-filter-'+filter_id.toString()] =
+                filter;
+            var filter_node = sub_node.appendChild(Y.Node.create(
+                '<div style="margin: 1em 0em 0em 1em"'+
+                '      class="subscription-filter"></div>')
+                .set('id', 'subscription-filter-'+filter_id.toString()))
+                .appendChild(Y.Node.create(
+                    '<div style="margin-top: 1em"></div>'));
+            filter_node.appendChild(Y.Node.create(
+                '<strong class="filter-name"></strong>'))
+                .appendChild(render_filter_title(sub.filters[j], filter));
+            if (sub.filters[j].can_mute) {
+                filter_node.appendChild(Y.Node.create(
+                    '<em class="mute-label" style="padding-left: 1em;">You '+
+                    'do not receive emails from this subscription.</em>'));
+            }
+            var can_edit = (!sub.filters[j].subscriber_is_team ||
+                            sub.filters[j].user_is_team_admin);
+            // Whitespace is stripped from the left and right of the string
+            // when you make a node, so we have to build the string with the
+            // intermediate whitespace and then create the node at the end.
+            var control_template = '';
+            if (sub.filters[j].can_mute) {
+                control_template += (
+                    '<a href="#" class="sprite js-action '+
+                    'mute-subscription"></a>');
+                if (can_edit) {
+                    control_template += ' or ';
+                }
+            }
+            if (can_edit) {
+                // User can edit the subscription.
+                control_template += (
+                    '<a href="#" class="sprite modify edit js-action '+
+                    '    edit-subscription">Edit this subscription</a> or '+
+                    '<a href="#" class="sprite modify remove js-action '+
+                    '    delete-subscription">Unsubscribe</a>');
+            }
+            filter_node.appendChild(Y.Node.create(
+                '<span style="float: right"></span>')
+                ).appendChild(Y.Node.create(control_template));
+            filter_node.appendChild(Y.Node.create(
+                '<div style="padding-left: 1em" '+
+                'class="filter-description"></div>'))
+                .appendChild(create_filter_description(filter));
+            if (sub.filters[j].can_mute) {
+                handle_mute(filter_node, sub.filters[j].is_muted);
+            }
+
+            filter_id += 1;
+        }
+
+        // We can remove this once we enforce at least one filter per
+        // subscription.
+        if (subscription_info[i].filters.length === 0) {
+            sub_node.appendChild(
+                '<div style="clear: both; padding: 1em 0 0 1em"></div>')
+                .appendChild('<strong>All messages</strong>');
+        }
+>>>>>>> MERGE-SOURCE
     }
-
-    listing.appendChild(top_node);
-
-    wire_up_edit_links(config);
+<<<<<<< TREE
+
+    listing.appendChild(top_node);
+
+    wire_up_edit_links(config);
+=======
+    listing.appendChild(top_node);
+
+    wire_up_edit_links(config);
+>>>>>>> MERGE-SOURCE
 }
 
 /**
@@ -1256,9 +1703,16 @@
 /**
  * Construct a textual description of all of filter's properties.
  */
-function create_filter_description(filter) {
-    var description = Y.Node.create('<div></div>');
-    var filter_items = [];
+<<<<<<< TREE
+function create_filter_description(filter) {
+    var description = Y.Node.create('<div></div>');
+    var filter_items = [];
+=======
+function create_filter_description(filter) {
+    var description = Y.Node.create('<div></div>');
+
+    var filter_items = [];
+>>>>>>> MERGE-SOURCE
     // Format status conditions.
     if (filter.statuses.length !== 0) {
         filter_items.push(Y.Node.create('<li></li>')
@@ -1300,13 +1754,25 @@
     // Format event details.
     var events; // When will email be sent?
     if (filter.bug_notification_level === 'Discussion') {
+<<<<<<< TREE
         events = 'You will recieve an email when any change '+
+=======
+        events = 'You will receive an email when any change '+
+>>>>>>> MERGE-SOURCE
             'is made or a comment is added.';
     } else if (filter.bug_notification_level === 'Details') {
+<<<<<<< TREE
         events = 'You will recieve an email when any changes '+
+=======
+        events = 'You will receive an email when any changes '+
+>>>>>>> MERGE-SOURCE
             'are made to the bug.  Bug comments will not be sent.';
     } else if (filter.bug_notification_level === 'Lifecycle') {
+<<<<<<< TREE
         events = 'You will recieve an email when bugs are '+
+=======
+        events = 'You will receive an email when bugs are '+
+>>>>>>> MERGE-SOURCE
             'opened or closed.';
     } else {
         throw new Error('Unrecognized events.');
@@ -1353,6 +1819,7 @@
         // Setup the Launchpad client.
         setup_client();
     }
+<<<<<<< TREE
     return true;
 }
 
@@ -1491,23 +1958,42 @@
 function show_add_overlay(config) {
     var content_node = Y.one(config.content_box);
     content_node.empty();
+=======
+    return true;
+}
+
+/**
+ * Show the overlay for creating a new subscription.
+ */
+function show_add_overlay(config) {
+    Y.one(config.content_box).empty();
+>>>>>>> MERGE-SOURCE
     var overlay_id = setup_overlay(config.content_box);
+<<<<<<< TREE
     clear_overlay(content_node, false);
+=======
+    clear_overlay(Y.one(config.content_box), false);
+>>>>>>> MERGE-SOURCE
 
     var submit_button = Y.Node.create(
         '<button type="submit" name="field.actions.create" ' +
         'value="Create subscription" class="lazr-pos lazr-btn" '+
         '>OK</button>');
+<<<<<<< TREE
 
     var success_callback = make_add_subscription_success_handler(config);
 
     var save_subscription = make_add_subscription_handler(success_callback);
+=======
+
+>>>>>>> MERGE-SOURCE
     create_overlay(config.content_box, overlay_id, submit_button,
                    save_subscription, success_callback);
     // We need to initialize the help links.  They may have already been
     // initialized except for the ones we added, so setupHelpTrigger
     // is idempotent.  Notice that this is old MochiKit code.
     forEach(findHelpLinks(), setupHelpTrigger);
+<<<<<<< TREE
     add_subscription_overlay.show();
     return overlay_id;
 }
@@ -1552,6 +2038,51 @@
     // Create the subscription links on the page.
     setup_subscription_link(config, '.menu-link-subscribe_to_bug_mail');
 }; // setup
+=======
+    add_subscription_overlay.show();
+    return overlay_id;
+}
+namespace._show_add_overlay = show_add_overlay;
+
+/**
+ * Modify a link to pop up a subscription overlay.
+ *
+ * @method setup_subscription_link
+ * @param {String} link_id Id of the link element.
+ * @param {String} overlay_id Id of the overlay element.
+ */
+function setup_subscription_link(config, link_id) {
+    // Modify the menu-link-subscribe-to-bug-mail link to be visible.
+    var link = Y.one(link_id);
+    if (!Y.Lang.isValue(link)) {
+        Y.fail('Link to set as the pop-up link not found.');
+    }
+    link.removeClass('invisible-link');
+    link.addClass('visible-link');
+    link.on('click', function(e) {
+        e.halt();
+        show_add_overlay(config);
+    });
+    link.addClass('js-action');
+}                               // setup_subscription_links
+
+/**
+ * External entry point for configuring the structural subscription.
+ * @method setup
+ * @param {Object} config Object literal of config name/value pairs.
+ *     config.content_box is the name of an element on the page where
+ *         the overlay will be anchored.
+ */
+namespace.setup = function(config) {
+    // Return if pre-setup fails.
+    if (!pre_setup(config)) {
+        return;
+    }
+
+    // Create the subscription links on the page.
+    setup_subscription_link(config, '.menu-link-subscribe_to_bug_mail');
+}; // setup
+>>>>>>> MERGE-SOURCE
 
 }, '0.1', {requires: [
         'dom', 'node', 'lazr.anim', 'lazr.formoverlay',

=== modified file 'lib/lp/registry/javascript/tests/test_distroseriesdifferences_details.js'
--- lib/lp/registry/javascript/tests/test_distroseriesdifferences_details.js	2011-04-05 12:10:01 +0000
+++ lib/lp/registry/javascript/tests/test_distroseriesdifferences_details.js	2011-04-07 15:19:36 +0000
@@ -40,6 +40,7 @@
             {"token": "FAILED", "title": "Failed"}];
         Assert.isUndefined(dsd_details.get_selected(voc_nothing_selected));
     },
+<<<<<<< TREE
 
     test_add_msg_node: function() {
         var placeholder = Y.one('#placeholder');
@@ -52,6 +53,20 @@
             placeholder.one('.package-diff-placeholder').get('innerHTML'),
             msg_txt);
     }
+=======
+
+    test_add_msg_node: function() {
+        var placeholder = Y.one('#placeholder');
+        placeholder.set('innerHTML', placeholder_content);
+        var msg_txt = 'Exemple text';
+        var msg_node = Y.Node.create(msg_txt);
+        var placeholder = Y.one('#placeholder');
+        dsd_details.add_msg_node(placeholder, msg_node);
+        Assert.areEqual(
+            placeholder.one('.package-diff-placeholder').get('innerHTML'),
+            msg_txt);
+    },
+>>>>>>> MERGE-SOURCE
 };
 
 

=== modified file 'lib/lp/registry/javascript/tests/test_structural_subscription.js'
--- lib/lp/registry/javascript/tests/test_structural_subscription.js	2011-04-07 02:14:43 +0000
+++ lib/lp/registry/javascript/tests/test_structural_subscription.js	2011-04-07 15:19:36 +0000
@@ -5,7 +5,7 @@
     filter: 'raw',
     combine: false,
     fetchCSS: false
-    }).use('test', 'console', 'node', 'lp.client',
+    }).use('test', 'console', 'node', 'node-event-simulate', 'lp.client',
         'lp.registry.structural_subscription', function(Y) {
 
     var suite = new Y.Test.Suite("Structural subscription overlay tests");
@@ -22,6 +22,10 @@
     var content_box_name = 'ss-content-box';
     var content_box_id = '#' + content_box_name;
 
+    // Listing node.
+    var subscription_listing_name = 'subscription-listing';
+    var subscription_listing_id = '#' + subscription_listing_name;
+
     var target_link_class = '.menu-link-subscribe_to_bug_mail';
 
     function array_compare(a,b) {
@@ -36,10 +40,13 @@
         return true;
     }
 
-    function create_test_node() {
+    function create_test_node(include_listing) {
         return Y.Node.create(
                 '<div id="test-content">' +
                 '  <div id="' + content_box_name + '"></div>' +
+                (include_listing
+                 ? ('  <div id="' + subscription_listing_name + '" style="width: 50%"></div>')
+                 : '') +
                 '</div>');
     }
 
@@ -58,6 +65,61 @@
         return true;
     }
 
+    function monkeypatch_LP() {
+          // Monkeypatch LP to avoid network traffic and to allow
+          // insertion of test data.
+          var original_lp = window.LP
+          window.LP = {
+            links: {},
+            cache: {}
+          };
+
+          LP.cache.context = {
+            title: 'Test Project',
+            self_link: 'https://launchpad.dev/api/test_project'
+          };
+          LP.cache.administratedTeams = [];
+          LP.cache.importances = ['Unknown', 'Critical', 'High', 'Medium',
+                                  'Low', 'Wishlist', 'Undecided'];
+          LP.cache.statuses = ['New', 'Incomplete', 'Opinion',
+                               'Invalid', 'Won\'t Fix', 'Expired',
+                               'Confirmed', 'Triaged', 'In Progress',
+                               'Fix Committed', 'Fix Released', 'Unknown'];
+          LP.links.me = 'https://launchpad.dev/api/~someone';
+          return original_lp;
+    }
+
+    function LPClient(){
+        if (!(this instanceof arguments.callee))
+            throw new Error("Constructor called as a function");
+        this.received = []
+        // We create new functions every time because we allow them to be
+        // configured.
+        this.named_post = function(url, func, config) {
+            this._call('named_post', config, arguments);
+        };
+        this.patch = function(bug_filter, data, config) {
+            this._call('patch', config, arguments);
+        }
+    };
+    LPClient.prototype._call = function(name, config, args) {
+        this.received.push(
+            [name, Array.prototype.slice.call(args)]);
+        if (!Y.Lang.isValue(args.callee.args))
+            throw new Error("Set call_args on "+name);
+        if (Y.Lang.isValue(args.callee.fail) && args.callee.fail) {
+            config.on.failure.apply(undefined, args.callee.args);
+        } else {
+            config.on.success.apply(undefined, args.callee.args);
+        }
+    };
+    // DELETE uses Y.io directly as of this writing, so we cannot stub it
+    // here.
+
+    function make_lp_client_stub() {
+        return new LPClient();
+    }
+
     test_case = new Y.Test.Case({
         name: 'structural_subscription_overlay',
 
@@ -481,28 +543,11 @@
         setUp: function() {
           // Monkeypatch LP to avoid network traffic and to allow
           // insertion of test data.
-          window.LP = {
-            links: {},
-            cache: {}
-          };
-
-          LP.cache.context = {
-            title: 'Test Project',
-            self_link: 'https://launchpad.dev/api/test_project'
-          };
-          LP.cache.administratedTeams = [];
-          LP.cache.importances = ['Unknown', 'Critical', 'High', 'Medium',
-                                  'Low', 'Wishlist', 'Undecided'];
-          LP.cache.statuses = ['New', 'Incomplete', 'Opinion',
-                               'Invalid', 'Won\'t Fix', 'Expired',
-                               'Confirmed', 'Triaged', 'In Progress',
-                               'Fix Committed', 'Fix Released', 'Unknown'];
-          LP.links.me = 'https://launchpad.dev/api/~someone';
-
-          var lp_client = function() {};
+          this.original_lp = monkeypatch_LP();
+
           this.configuration = {
               content_box: content_box_id,
-              lp_client: lp_client
+              lp_client: make_lp_client_stub()
           };
 
           this.content_node = create_test_node();
@@ -510,17 +555,16 @@
         },
 
         tearDown: function() {
-          remove_test_node();
-          delete this.content_node;
+            window.LP = this.original_lp;
+            remove_test_node();
+            delete this.content_node;
         },
 
         test_overlay_error_handling_adding: function() {
             // Verify that errors generated during adding of a filter are
             // displayed to the user.
-            this.configuration.lp_client.named_post =
-                function(url, func, config) {
-                config.on.failure(true, true);
-                };
+            this.configuration.lp_client.named_post.fail = true;
+            this.configuration.lp_client.named_post.args = [true, true];
             module.setup(this.configuration);
             module._show_add_overlay(this.configuration);
             // After the setup the overlay should be in the DOM.
@@ -540,17 +584,10 @@
             // displayed to the user.
             var original_delete_filter = module._delete_filter;
             module._delete_filter = function() {};
-            this.configuration.lp_client.patch =
-                function(bug_filter, data, config) {
-                    config.on.failure(true, true);
-                };
-            var bug_filter = {
-                'getAttrs': function() { return {}; }
-            };
-            this.configuration.lp_client.named_post =
-                function(url, func, config) {
-                    config.on.success(bug_filter);
-                };
+            this.configuration.lp_client.patch.fail = true;
+            this.configuration.lp_client.patch.args = [true, true];
+            this.configuration.lp_client.named_post.args = [
+                {'getAttrs': function() { return {}; }}];
             module.setup(this.configuration);
             module._show_add_overlay(this.configuration);
             // After the setup the overlay should be in the DOM.
@@ -808,6 +845,373 @@
 
     }));
 
+    suite.add(new Y.Test.Case({
+        name: 'Structural Subscription mute team subscriptions',
+
+        // Verify that the mute controls and labels on the edit block
+        // render and interact properly
+
+        _should: {
+            error: {
+                }
+            },
+
+        setUp: function() {
+            // Monkeypatch LP to avoid network traffic and to allow
+            // insertion of test data.
+            this.original_lp = monkeypatch_LP();
+            this.test_node = create_test_node(true);
+            Y.one('body').appendChild(this.test_node);
+            this.lp_client = make_lp_client_stub();
+            LP.cache.subscription_info = [
+                {target_url: 'http://example.com',
+                 target_title:'Example project',
+                 filters: [
+                    {filter: {
+                        statuses: [],
+                        importances: [],
+                        tags: [],
+                        find_all_tags: true,
+                        bug_notification_level: 'Discussion',
+                        self_link: 'http://example.com/a_filter'
+                        },
+                    can_mute: true,
+                    is_muted: false,
+                    subscriber_is_team: true,
+                    subscriber_url: 'http://example.com/subscriber',
+                    subscriber_title: 'Thidwick',
+                    user_is_team_admin: false,
+                    }
+                    ]
+                }
+                ];
+        },
+
+        tearDown: function() {
+            remove_test_node();
+            window.LP = this.original_lp;
+        },
+
+        test_not_muted_rendering: function() {
+            // Verify that an unmuted subscription is rendered correctly.
+            module.setup_bug_subscriptions(
+                {content_box: content_box_id,
+                 lp_client: this.lp_client});
+            var listing = this.test_node.one(subscription_listing_id);
+            var filter_node = listing.one('#subscription-filter-0');
+            Assert.isNotNull(filter_node);
+            var mute_label_node = filter_node.one('.mute-label');
+            Assert.isNotNull(mute_label_node);
+            Assert.areEqual(mute_label_node.getStyle('display'), 'none');
+            var mute_link = filter_node.one('a.mute-subscription');
+            Assert.isNotNull(mute_link);
+            Assert.isTrue(mute_link.hasClass('no'));
+        },
+
+        test_muted_rendering: function() {
+            // Verify that a muted subscription is rendered correctly.
+            LP.cache.subscription_info[0].filters[0].is_muted = true;
+            module.setup_bug_subscriptions(
+                {content_box: content_box_id,
+                 lp_client: this.lp_client});
+            var listing = this.test_node.one(subscription_listing_id);
+            var filter_node = listing.one('#subscription-filter-0');
+            Assert.isNotNull(filter_node);
+            var mute_label_node = filter_node.one('.mute-label');
+            Assert.isNotNull(mute_label_node);
+            Assert.areEqual(mute_label_node.getStyle('display'), 'inline');
+            var mute_link = filter_node.one('a.mute-subscription');
+            Assert.isNotNull(mute_link);
+            Assert.isTrue(mute_link.hasClass('yes'));
+        },
+
+        test_not_muted_toggle_muted: function() {
+            // Verify that an unmuted subscription can be muted.
+            module.setup_bug_subscriptions(
+                {content_box: content_box_id,
+                 lp_client: this.lp_client});
+            var listing = this.test_node.one(subscription_listing_id);
+            var filter_node = listing.one('#subscription-filter-0');
+            var mute_label_node = filter_node.one('.mute-label');
+            var mute_link = filter_node.one('a.mute-subscription');
+            this.lp_client.named_post.args = [];
+            Y.Event.simulate(Y.Node.getDOMNode(mute_link), 'click');
+            Assert.areEqual(this.lp_client.received[0][0], 'named_post');
+            Assert.areEqual(
+                this.lp_client.received[0][1][0],
+                'http://example.com/a_filter');
+            Assert.areEqual(
+                this.lp_client.received[0][1][1], 'mute');
+            Assert.areEqual(mute_label_node.getStyle('display'), 'inline');
+            Assert.isTrue(mute_link.hasClass('yes'));
+        },
+
+        test_muted_toggle_not_muted: function() {
+            // Verify that a muted subscription can be unmuted.
+            LP.cache.subscription_info[0].filters[0].is_muted = true;
+            module.setup_bug_subscriptions(
+                {content_box: content_box_id,
+                 lp_client: this.lp_client});
+            var listing = this.test_node.one(subscription_listing_id);
+            var filter_node = listing.one('#subscription-filter-0');
+            var mute_label_node = filter_node.one('.mute-label');
+            var mute_link = filter_node.one('a.mute-subscription');
+            this.lp_client.named_post.args = [];
+            Y.Event.simulate(Y.Node.getDOMNode(mute_link), 'click');
+            Assert.areEqual(this.lp_client.received[0][0], 'named_post');
+            Assert.areEqual(
+                this.lp_client.received[0][1][0],
+                'http://example.com/a_filter');
+            Assert.areEqual(
+                this.lp_client.received[0][1][1], 'unmute');
+            Assert.areEqual(mute_label_node.getStyle('display'), 'none');
+            Assert.isTrue(mute_link.hasClass('no'));
+        }
+
+    }));
+
+    suite.add(new Y.Test.Case({
+        name: 'Structural Subscription: add subscription workflow',
+
+        _should: {error: {}},
+
+        setUp: function() {
+            var TestBugFilter = function() {};
+            TestBugFilter.prototype = {
+                'getAttrs': function () {
+                    return {};
+                }
+            };
+            // We need an lp_client that will appear to successfully create the
+            // bug filter.
+            var TestClient = function() {};
+            TestClient.prototype = {
+                named_post: function (uri, operation_name, config) {
+                    config.on.success(new TestBugFilter());
+                    this.post_called = true;
+                },
+                patch: function(uri, representation, config, headers) {
+                    config.on.success();
+                    this.patch_called = true;
+                },
+                post_called: false,
+                patch_called: false
+            };
+
+            this.original_lp = monkeypatch_LP();
+
+            this.configuration = {
+                content_box: content_box_id,
+                lp_client: new TestClient()
+            };
+            this.content_node = create_test_node();
+            Y.one('body').appendChild(this.content_node);
+        },
+
+        tearDown: function() {
+            window.LP = this.original_lp;
+            remove_test_node();
+            delete this.content_node;
+        },
+
+        test_simple_add_workflow: function() {
+            // Clicking on the "Subscribe to bug mail" link and then clicking
+            // on the overlay form's "OK" button results in a filter being
+            // created and PATCHed.
+            module.setup(this.configuration);
+            Y.one('a.menu-link-subscribe_to_bug_mail').simulate('click');
+            Assert.isFalse(module.lp_client.post_called);
+            Assert.isFalse(module.lp_client.patch_called);
+            var button = Y.one('.yui3-lazr-formoverlay-actions button');
+            Assert.areEqual(button.get('text'), 'OK');
+            button.simulate('click');
+            Assert.isTrue(module.lp_client.post_called);
+            Assert.isTrue(module.lp_client.patch_called);
+        },
+
+        test_simple_add_workflow_cancelled: function() {
+            // Clicking on the "Subscribe to bug mail" link and then clicking
+            // on the overlay form's cancel button results in no filter being
+            // created or PATCHed.
+            module.setup(this.configuration);
+            Y.one('a.menu-link-subscribe_to_bug_mail').simulate('click');
+            Assert.isFalse(module.lp_client.post_called);
+            Assert.isFalse(module.lp_client.patch_called);
+            var button = Y.one(
+                '.yui3-lazr-formoverlay-actions button+button');
+            Assert.areEqual(button.get('text'), 'Cancel');
+            button.simulate('click');
+            Assert.isFalse(module.lp_client.post_called);
+            Assert.isFalse(module.lp_client.patch_called);
+        }
+
+    }));
+
+    suite.add(new Y.Test.Case({
+        name: 'Structural Subscription: edit subscription workflow',
+
+        _should: {error: {}},
+
+        setUp: function() {
+            var TestBugFilter = function(data) {
+                if (data !== undefined) {
+                    this._data = data;
+                } else {
+                    this._data = {};
+                }
+            };
+            TestBugFilter.prototype = {
+                'getAttrs': function () {
+                    return this._data;
+                }
+            };
+            // We need an lp_client that will appear to successfully create the
+            // bug filter.
+            var TestClient = function() {
+                this.post_called = false;
+                this.patch_called = false;
+            };
+            TestClient.prototype = {
+                named_post: function (uri, operation_name, config) {
+                    config.on.success(new TestBugFilter());
+                    this.post_called = true;
+                },
+                patch: function(uri, representation, config, headers) {
+                    config.on.success(new TestBugFilter(representation));
+                    this.patch_called = true;
+                }
+            };
+
+            this.original_lp = monkeypatch_LP();
+
+            LP.cache.subscription_info = [{
+                target_url: 'http://example.com',
+                target_title:'Example project',
+                filters: [{
+                    filter: {
+                        description: 'DESCRIPTION',
+                        statuses: [],
+                        importances: [],
+                        tags: [],
+                        find_all_tags: true,
+                        bug_notification_level: 'Discussion',
+                        self_link: 'http://example.com/a_filter'
+                        },
+                    can_mute: true,
+                    is_muted: false,
+                    subscriber_is_team: false,
+                    subscriber_url: 'http://example.com/subscriber',
+                    subscriber_title: 'Thidwick',
+                    user_is_team_admin: false
+                }]
+            }];
+
+
+            this.configuration = {
+                content_box: content_box_id,
+                lp_client: new TestClient()
+            };
+            this.content_node = create_test_node(true);
+            Y.one('body').appendChild(this.content_node);
+        },
+
+        tearDown: function() {
+            window.LP = this.original_lp;
+            remove_test_node();
+            delete this.content_node;
+        },
+
+        test_simple_edit_workflow: function() {
+            module.setup_bug_subscriptions(this.configuration);
+
+            // Editing a value via the edit link and dialog causes the
+            // subscription list to reflect the new value.
+            var label = Y.one('.filter-name span').get('text');
+            Assert.isTrue(label.indexOf('DESCRIPTION') !== -1);
+
+            // No PATCHing has happened yet.
+            Assert.isFalse(module.lp_client.patch_called);
+
+            // Click the edit link.
+            Y.one('a.edit-subscription').simulate('click');
+
+            // Set a new name (description) and click OK.
+            Y.one('input[name="name"]').set('value', 'NEW VALUE');
+            var button = Y.one('.yui3-lazr-formoverlay-actions button');
+            Assert.areEqual(button.get('text'), 'OK');
+            button.simulate('click');
+
+            // Clicking OK resulted in the bug filter being PATCHed.
+            Assert.isTrue(module.lp_client.patch_called);
+            // And the new value is reflected in the subscription listing.
+            label = Y.one('.filter-name span').get('text');
+            Assert.isTrue(label.indexOf('NEW VALUE') !== -1);
+        }
+
+    }));
+
+    suite.add(new Y.Test.Case({
+        name: 'Structural Subscription: unsubscribing',
+
+        _should: {error: {}},
+
+        setUp: function() {
+            var TestClient = function() {};
+            this.original_lp = monkeypatch_LP();
+
+            LP.cache.subscription_info = [{
+                target_url: 'http://example.com',
+                target_title:'Example project',
+                filters: [{
+                    filter: {
+                        description: 'DESCRIPTION',
+                        statuses: [],
+                        importances: [],
+                        tags: [],
+                        find_all_tags: true,
+                        bug_notification_level: 'Discussion',
+                        self_link: 'http://example.com/a_filter'
+                        },
+                    can_mute: true,
+                    is_muted: false,
+                    subscriber_is_team: false,
+                    subscriber_url: 'http://example.com/subscriber',
+                    subscriber_title: 'Thidwick',
+                    user_is_team_admin: false
+                }]
+            }];
+
+            this.configuration = {
+                content_box: content_box_id,
+                lp_client: new TestClient()
+            };
+            this.content_node = create_test_node(true);
+            Y.one('body').appendChild(this.content_node);
+        },
+
+        tearDown: function() {
+            window.LP = this.original_lp;
+            remove_test_node();
+            delete this.content_node;
+        },
+
+        test_simple_unsubscribe: function() {
+            // Clicking on the unsubscribe link will result in a DELETE being
+            // sent and the filter description being removed.
+
+            var DELETE_performed = false;
+            // Fake a DELETE that succeeds.
+            module._Y_io_hook = function (link, config) {
+                DELETE_performed = true;
+                config.on.success();
+            };
+
+            module.setup_bug_subscriptions(this.configuration);
+            Y.one('a.delete-subscription').simulate('click');
+            Assert.isTrue(DELETE_performed);
+        }
+
+    }));
     // Lock, stock, and two smoking barrels.
     var handle_complete = function(data) {
         var status_node = Y.Node.create(

=== modified file 'lib/lp/registry/model/distroseriesdifference.py'
=== modified file 'lib/lp/registry/model/person.py'
--- lib/lp/registry/model/person.py	2011-03-31 19:10:35 +0000
+++ lib/lp/registry/model/person.py	2011-04-07 15:19:36 +0000
@@ -3965,6 +3965,9 @@
         self._mergeBugNotificationRecipient(cur, from_id, to_id)
         skip.append(('bugnotificationrecipient', 'person'))
 
+        # We ignore BugSubscriptionFilterMutes.
+        skip.append(('bugsubscriptionfiltermute', 'person'))
+
         self._mergePackageBugSupervisor(cur, from_id, to_id)
         skip.append(('packagebugsupervisor', 'bug_supervisor'))
 

=== modified file 'lib/lp/registry/templates/distroseries-localdifferences.pt'
--- lib/lp/registry/templates/distroseries-localdifferences.pt	2011-04-06 07:52:33 +0000
+++ lib/lp/registry/templates/distroseries-localdifferences.pt	2011-04-07 15:19:36 +0000
@@ -70,6 +70,7 @@
                     id string:field.selected_differences.${src_name}"/>
 
                 <a tal:attributes="href difference/fmt:url" class="toggle-extra"
+<<<<<<< TREE
                    tal:content="src_name">Foo</a>
               </td>
               <td tal:condition="view/show_parent_version">
@@ -102,6 +103,32 @@
                   class="packagesets">
                 <tal:replace replace="difference/@@/packagesets_names" />
               </td>
+=======
+                    tal:content="parent_source_pub/source_package_name">Foo</a>
+              </td>
+              <td tal:define="parent_source_pck_url difference/@@/parent_source_package_url">
+                <a tal:condition="parent_source_pck_url"
+                    tal:attributes="href difference/@@/parent_source_package_url"
+                    class="parent-version">
+                    <tal:replace
+                        replace="difference/parent_source_version"/></a>
+                <span tal:condition="not: parent_source_pck_url"
+                    class="parent-version"
+                    tal:content="difference/parent_source_version">
+                </span>
+              </td>
+              <td tal:define="source_pck_url difference/@@/source_package_url">
+                <a tal:condition="source_pck_url"
+                    tal:attributes="href difference/@@/source_package_url"
+                    class="derived-version">
+                    <tal:replace
+                        replace="difference/source_version"/></a>
+                <span tal:condition="not: source_pck_url"
+                    class="derived-version"
+                    tal:content="difference/source_version">
+                </span>
+               </td>
+>>>>>>> MERGE-SOURCE
               <td>
                 <tal:parent condition="not: view/show_derived_version">
                   <span tal:attributes="title difference/parent_source_pub/datepublished/fmt:datetime"

=== modified file 'lib/lp/registry/tests/test_distroseriesdifference.py'
=== modified file 'lib/lp/services/tests/test_utils.py'
--- lib/lp/services/tests/test_utils.py	2011-02-20 13:26:48 +0000
+++ lib/lp/services/tests/test_utils.py	2011-04-07 15:19:36 +0000
@@ -8,8 +8,8 @@
 from contextlib import contextmanager
 import hashlib
 import itertools
+import os
 import sys
-import unittest
 
 from lp.services.utils import (
     AutoDecorate,
@@ -18,6 +18,7 @@
     CachingIterator,
     decorate_with,
     docstring_dedent,
+    file_exists,
     iter_split,
     run_capturing_output,
     traceback_info,
@@ -25,7 +26,6 @@
 from lp.testing import TestCase
 
 
-
 class TestAutoDecorate(TestCase):
     """Tests for AutoDecorate."""
 
@@ -34,12 +34,14 @@
         self.log = None
 
     def decorator_1(self, f):
+
         def decorated(*args, **kwargs):
             self.log.append(1)
             return f(*args, **kwargs)
         return decorated
 
     def decorator_2(self, f):
+
         def decorated(*args, **kwargs):
             self.log.append(2)
             return f(*args, **kwargs)
@@ -48,11 +50,12 @@
     def test_auto_decorate(self):
         # All of the decorators passed to AutoDecorate are applied as
         # decorators in reverse order.
-
         class AutoDecoratedClass:
             __metaclass__ = AutoDecorate(self.decorator_1, self.decorator_2)
+
             def method_a(s):
                 self.log.append('a')
+
             def method_b(s):
                 self.log.append('b')
 
@@ -172,11 +175,13 @@
         # When run, a function decorated with decorated_with runs with the
         # context given to decorated_with.
         calls = []
+
         @contextmanager
         def appending_twice():
             calls.append('before')
             yield
             calls.append('after')
+
         @decorate_with(appending_twice)
         def function():
             pass
@@ -187,6 +192,7 @@
         # The original function is actually called when we call the result of
         # decoration.
         calls = []
+
         @decorate_with(self.trivialContextManager)
         def function():
             calls.append('foo')
@@ -196,6 +202,7 @@
     def test_decorate_with_call_twice(self):
         # A function decorated with decorate_with can be called twice.
         calls = []
+
         @decorate_with(self.trivialContextManager)
         def function():
             calls.append('foo')
@@ -206,6 +213,7 @@
     def test_decorate_with_arguments(self):
         # decorate_with passes through arguments.
         calls = []
+
         @decorate_with(self.trivialContextManager)
         def function(*args, **kwargs):
             calls.append((args, kwargs))
@@ -258,15 +266,36 @@
     """Test `run_capturing_output`."""
 
     def test_run_capturing_output(self):
+
         def f(a, b):
             sys.stdout.write(str(a))
             sys.stderr.write(str(b))
             return a + b
+
         c, stdout, stderr = run_capturing_output(f, 3, 4)
         self.assertEqual(7, c)
         self.assertEqual('3', stdout)
         self.assertEqual('4', stderr)
 
 
-def test_suite():
-    return unittest.TestLoader().loadTestsFromName(__name__)
+class TestFileExists(TestCase):
+    """Tests for `file_exists`."""
+
+    def setUp(self):
+        super(TestFileExists, self).setUp()
+        self.useTempDir()
+
+    def test_finds_file(self):
+        file("a-real-file.txt", "w").write("Here I am.")
+        self.assertTrue(file_exists("a-real-file.txt"))
+
+    def test_finds_directory(self):
+        os.makedirs("a-real-directory")
+        self.assertTrue(file_exists("a-real-directory"))
+
+    def test_says_no_if_not_found(self):
+        self.assertFalse(file_exists("a-nonexistent-file.txt"))
+
+    def test_is_not_upset_by_missing_directory(self):
+        self.assertFalse(
+            file_exists("a-nonexistent-directory/a-nonexistent-file.txt"))

=== modified file 'lib/lp/services/utils.py'
--- lib/lp/services/utils.py	2011-03-29 13:57:20 +0000
+++ lib/lp/services/utils.py	2011-04-07 15:19:36 +0000
@@ -15,6 +15,7 @@
     'compress_hash',
     'decorate_with',
     'docstring_dedent',
+    'file_exists',
     'iter_list_chunks',
     'iter_split',
     'run_capturing_output',
@@ -25,6 +26,7 @@
     ]
 
 from itertools import tee
+import os
 from StringIO import StringIO
 import string
 import sys
@@ -48,6 +50,7 @@
     """
 
     class AutoDecorateMetaClass(type):
+
         def __new__(cls, class_name, bases, class_dict):
             new_class_dict = {}
             for name, value in class_dict.items():
@@ -217,11 +220,15 @@
 
 def decorate_with(context_factory, *args, **kwargs):
     """Create a decorator that runs decorated functions with 'context'."""
+
     def decorator(function):
+
         def decorated(*a, **kw):
             with context_factory(*args, **kwargs):
                 return function(*a, **kw)
+
         return mergeFunctionMetadata(function, decorated)
+
     return decorator
 
 
@@ -236,6 +243,11 @@
     return (first + '\n' + dedent(rest)).strip()
 
 
+def file_exists(filename):
+    """Does `filename` exist?"""
+    return os.access(filename, os.F_OK)
+
+
 class CapturedOutput(Fixture):
     """A fixture that captures output to stdout and stderr."""
 

=== modified file 'lib/lp/soyuz/enums.py'
--- lib/lp/soyuz/enums.py	2011-03-21 12:55:50 +0000
+++ lib/lp/soyuz/enums.py	2011-04-07 15:19:36 +0000
@@ -1,4 +1,4 @@
-# Copyright 2010 Canonical Ltd.  This software is licensed under the
+# Copyright 2010-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """Enumerations used in the lp/soyuz modules."""
@@ -10,6 +10,7 @@
     'ArchivePurpose',
     'ArchiveStatus',
     'ArchiveSubscriberStatus',
+    'archive_suffixes',
     'BinaryPackageFileType',
     'BinaryPackageFormat',
     'PackageCopyStatus',
@@ -102,6 +103,13 @@
         """)
 
 
+archive_suffixes = {
+    ArchivePurpose.PRIMARY: '',
+    ArchivePurpose.PARTNER: '-partner',
+    ArchivePurpose.DEBUG: '-debug',
+}
+
+
 class ArchiveStatus(DBEnumeratedType):
     """The status of an archive, e.g. active, disabled. """
 
@@ -127,19 +135,19 @@
 
 class ArchiveSubscriberStatus(DBEnumeratedType):
     """The status of an `ArchiveSubscriber`."""
-    
+
     CURRENT = DBItem(1, """
         Active
 
         The subscription is current.
         """)
-    
+
     EXPIRED = DBItem(2, """
         Expired
 
         The subscription has expired.
         """)
-    
+
     CANCELLED = DBItem(3, """
         Cancelled
 
@@ -532,6 +540,3 @@
         Specifies a native package, with a single tar.*. Supports gzip,
         bzip2, and xz compression.
         """)
-
-
-

=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py	2011-03-23 18:29:09 +0000
+++ lib/lp/soyuz/model/archive.py	2011-04-07 15:19:36 +0000
@@ -7,7 +7,10 @@
 
 __metaclass__ = type
 
-__all__ = ['Archive', 'ArchiveSet']
+__all__ = [
+    'Archive',
+    'ArchiveSet',
+    ]
 
 from operator import attrgetter
 import re
@@ -108,6 +111,7 @@
     ArchivePurpose,
     ArchiveStatus,
     ArchiveSubscriberStatus,
+    archive_suffixes,
     PackagePublishingStatus,
     PackageUploadStatus,
     )
@@ -408,12 +412,6 @@
     @property
     def archive_url(self):
         """See `IArchive`."""
-        archive_postfixes = {
-            ArchivePurpose.PRIMARY: '',
-            ArchivePurpose.PARTNER: '-partner',
-            ArchivePurpose.DEBUG: '-debug',
-        }
-
         if self.is_ppa:
             if self.private:
                 url = config.personalpackagearchive.private_base_url
@@ -432,7 +430,7 @@
             return urlappend(url, self.distribution.name)
 
         try:
-            postfix = archive_postfixes[self.purpose]
+            postfix = archive_suffixes[self.purpose]
         except KeyError:
             raise AssertionError(
                 "archive_url unknown for purpose: %s" % self.purpose)
@@ -472,14 +470,16 @@
         # callers are problematic. (Migrate them and test to see).
         clauses = []
         storm_clauses = [
-            SourcePackagePublishingHistory.archiveID==self.id,
-            SourcePackagePublishingHistory.sourcepackagereleaseID==
+            SourcePackagePublishingHistory.archiveID == self.id,
+            SourcePackagePublishingHistory.sourcepackagereleaseID ==
                 SourcePackageRelease.id,
-            SourcePackageRelease.sourcepackagenameID==
-                SourcePackageName.id
-            ]
-        orderBy = [SourcePackageName.name,
-                   Desc(SourcePackagePublishingHistory.id)]
+            SourcePackageRelease.sourcepackagenameID ==
+                SourcePackageName.id,
+            ]
+        orderBy = [
+            SourcePackageName.name,
+            Desc(SourcePackagePublishingHistory.id),
+            ]
 
         if name is not None:
             if exact_match:
@@ -509,7 +509,8 @@
 
         if distroseries is not None:
             storm_clauses.append(
-                SourcePackagePublishingHistory.distroseriesID==distroseries.id)
+                SourcePackagePublishingHistory.distroseriesID ==
+                    distroseries.id)
 
         if pocket is not None:
             storm_clauses.append(

=== modified file 'lib/lp/soyuz/scripts/processaccepted.py'
--- lib/lp/soyuz/scripts/processaccepted.py	2010-09-23 02:12:27 +0000
+++ lib/lp/soyuz/scripts/processaccepted.py	2011-04-07 15:19:36 +0000
@@ -30,7 +30,10 @@
 from lp.bugs.interfaces.bugtask import BugTaskStatus
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.scripts.base import LaunchpadScript
+from lp.services.scripts.base import (
+    LaunchpadScript,
+    LaunchpadScriptFailure,
+    )
 from lp.soyuz.enums import (
     ArchivePurpose,
     PackageUploadStatus,
@@ -241,6 +244,9 @@
         try:
             self.logger.debug("Finding distribution %s." % distro_name)
             distribution = getUtility(IDistributionSet).getByName(distro_name)
+            if distribution is None:
+                raise LaunchpadScriptFailure(
+                    "Distribution '%s' not found." % distro_name)
 
             # target_archives is a tuple of (archive, description).
             if self.options.ppa:

=== modified file 'lib/lp/testing/__init__.py'
--- lib/lp/testing/__init__.py	2011-04-07 01:08:21 +0000
+++ lib/lp/testing/__init__.py	2011-04-07 15:19:36 +0000
@@ -1092,17 +1092,19 @@
         now += delta
 
 
-def run_script(cmd_line):
+def run_script(cmd_line, env=None):
     """Run the given command line as a subprocess.
 
-    Return a 3-tuple containing stdout, stderr and the process' return code.
-
-    The environment given to the subprocess is the same as the one in the
-    parent process except for the PYTHONPATH, which is removed so that the
-    script, passed as the `cmd_line` parameter, will fail if it doesn't set it
-    up properly.
+    :param cmd_line: A command line suitable for passing to
+        `subprocess.Popen`.
+    :param env: An optional environment dict.  If none is given, the
+        script will get a copy of your present environment.  Either way,
+        PYTHONPATH will be removed from it because it will break the
+        script.
+    :return: A 3-tuple of stdout, stderr, and the process' return code.
     """
-    env = os.environ.copy()
+    if env is None:
+        env = os.environ.copy()
     env.pop('PYTHONPATH', None)
     process = subprocess.Popen(
         cmd_line, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE,

=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py	2011-04-04 07:21:23 +0000
+++ lib/lp/testing/factory.py	2011-04-07 15:19:36 +0000
@@ -256,11 +256,15 @@
     )
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
-from lp.soyuz.interfaces.component import IComponentSet
+from lp.soyuz.interfaces.component import (
+    IComponent,
+    IComponentSet,
+    )
 from lp.soyuz.interfaces.packageset import IPackagesetSet
 from lp.soyuz.interfaces.processor import IProcessorFamilySet
 from lp.soyuz.interfaces.publishing import IPublishingSet
 from lp.soyuz.interfaces.section import ISectionSet
+from lp.soyuz.model.component import ComponentSelection
 from lp.soyuz.model.files import (
     BinaryPackageFile,
     SourcePackageReleaseFile,
@@ -2403,6 +2407,23 @@
             name = self.getUniqueString()
         return getUtility(IComponentSet).ensure(name)
 
+    def makeComponentSelection(self, distroseries=None, component=None):
+        """Make a new `ComponentSelection`.
+
+        :param distroseries: Optional `DistroSeries`.  If none is given,
+            one will be created.
+        :param component: Optional `Component` or a component name.  If
+            none is given, one will be created.
+        """
+        if distroseries is None:
+            distroseries = self.makeDistroSeries()
+
+        if not IComponent.providedBy(component):
+            component = self.makeComponent(component)
+
+        return ComponentSelection(
+            distroseries=distroseries, component=component)
+
     def makeArchive(self, distribution=None, owner=None, name=None,
                     purpose=None, enabled=True, private=False,
                     virtualized=True, description=None, displayname=None):

=== modified file 'lib/lp/testing/tests/test_factory.py'
--- lib/lp/testing/tests/test_factory.py	2011-03-08 11:59:38 +0000
+++ lib/lp/testing/tests/test_factory.py	2011-04-07 15:19:36 +0000
@@ -52,6 +52,7 @@
     )
 from lp.soyuz.interfaces.queue import IPackageUpload
 from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease
+from lp.soyuz.model.component import ComponentSelection
 from lp.testing import TestCaseWithFactory
 from lp.testing.factory import is_security_proxied_or_harmless
 from lp.testing.matchers import (
@@ -399,6 +400,28 @@
         distroseries = self.factory.makeDistroSeries()
         self.assertThat(distroseries.displayname, StartsWith("Distroseries"))
 
+    # makeComponentSelection
+    def test_makeComponentSelection_makes_ComponentSelection(self):
+        selection = self.factory.makeComponentSelection()
+        self.assertIsInstance(selection, ComponentSelection)
+
+    def test_makeComponentSelection_uses_distroseries(self):
+        distroseries = self.factory.makeDistroSeries()
+        selection = self.factory.makeComponentSelection(
+            distroseries=distroseries)
+        self.assertEqual(distroseries, selection.distroseries)
+
+    def test_makeComponentSelection_uses_component(self):
+        component = self.factory.makeComponent()
+        selection = self.factory.makeComponentSelection(component=component)
+        self.assertEqual(component, selection.component)
+
+    def test_makeComponentSelection_finds_component(self):
+        component = self.factory.makeComponent()
+        selection = self.factory.makeComponentSelection(
+            component=component.name)
+        self.assertEqual(component, selection.component)
+
     # makeLanguage
     def test_makeLanguage(self):
         # Without parameters, makeLanguage creates a language with code

=== modified file 'versions.cfg'