
launchpad-reviewers team mailing list archive

[Merge] lp:~cjwatson/launchpad/remove-sync-source into lp:launchpad


Colin Watson has proposed merging lp:~cjwatson/launchpad/remove-sync-source into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)
Related bugs:
  Bug #35723 in Launchpad itself: "sync-source should not connect as the 'ro' user"
  https://bugs.launchpad.net/launchpad/+bug/35723

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/remove-sync-source/+merge/88190

== Summary ==

sync-source.py has been widely considered a wart for ages, and the work of Julian and his team on Archive.copyPackage/Archive.copyPackages and friends effectively replaces it, in combination with the syncpackage tool in ubuntu-dev-tools.  I've just finished converting all the Ubuntu archive administration processes to stop using sync-source.py, so let's remove it.
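
For reference, a rough sketch of the replacement workflow driven from launchpadlib (the script name, package, version, and target series below are invented; it just illustrates the Archive.copyPackage call mentioned above):

    from launchpadlib.launchpad import Launchpad

    # Log in and look up the archives involved (application name is arbitrary).
    lp = Launchpad.login_with("sync-example", "production")
    ubuntu = lp.distributions["ubuntu"]
    debian = lp.distributions["debian"]

    # Ask Launchpad to copy a source package from Debian's archive into an
    # Ubuntu series; syncpackage in ubuntu-dev-tools wraps this sort of call.
    ubuntu.main_archive.copyPackage(
        source_name="hello",               # invented example package
        version="2.7-2",                   # invented Debian version
        from_archive=debian.main_archive,
        to_series="precise",               # invented target series
        to_pocket="Release",
        include_binaries=False)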

== Proposed fix ==

Kill it with fire.

It's not a prerequisite as such, but it would be good if https://code.launchpad.net/~cjwatson/launchpad/archive-copy-packages-source-series/+merge/87942 were reviewed first; that fixes a serious problem I found when using the 'sync-source.py -a' replacement.

== Tests ==

bin/test -vvct soyuz.scripts

== Demo and Q/A ==

None.

== lint ==

None.
-- 
https://code.launchpad.net/~cjwatson/launchpad/remove-sync-source/+merge/88190
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/remove-sync-source into lp:launchpad.
=== modified file 'lib/lp/soyuz/scripts/ftpmaster.py'
--- lib/lp/soyuz/scripts/ftpmaster.py	2012-01-06 11:08:30 +0000
+++ lib/lp/soyuz/scripts/ftpmaster.py	2012-01-11 12:08:26 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2012 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """FTPMaster utilities."""
@@ -13,26 +13,17 @@
     'ObsoleteDistroseries',
     'PackageRemover',
     'PubSourceChecker',
-    'SyncSource',
-    'SyncSourceError',
     ]
 
-import hashlib
 from itertools import chain
 import os
-import stat
-import sys
-import time
 
-from debian.deb822 import Changes
 from zope.component import getUtility
 
 from lp.app.errors import NotFoundError
-from lp.archiveuploader.utils import determine_source_file_type
 from lp.registry.interfaces.person import IPersonSet
 from lp.registry.interfaces.pocket import pocketsuffix
 from lp.registry.interfaces.series import SeriesStatus
-from lp.registry.interfaces.sourcepackage import SourcePackageFileType
 from lp.services.browser_helpers import get_plural_text
 from lp.services.database.constants import UTC_NOW
 from lp.services.helpers import filenameToContentType
@@ -405,138 +396,6 @@
         copy_and_close(pocket_chroot.chroot, local_file)
 
 
-class SyncSourceError(Exception):
-    """Raised when an critical error occurs inside SyncSource.
-
-    The entire procedure should be aborted in order to avoid unknown problems.
-    """
-
-
-class SyncSource:
-    """Sync Source procedure helper class.
-
-    It provides the backend for retrieving files from Librarian or the
-    'sync source' location. Also provides a method to check the downloaded
-    files integrity.
-    'aptMD5Sum' is provided as a classmethod during the integration time.
-    """
-
-    def __init__(self, files, origin, logger, downloader, todistro):
-        """Store local context.
-
-        files: a dictionary where the keys are the filename and the
-               value another dictionary with the file informations.
-        origin: a dictionary similar to 'files' but where the values
-                contain information for download files to be synchronized
-        logger: a logger
-        downloader: a callable that fetchs URLs,
-                    'downloader(url, destination)'
-        todistro: target distribution object
-        """
-        self.files = files
-        self.origin = origin
-        self.logger = logger
-        self.downloader = downloader
-        self.todistro = todistro
-
-    @classmethod
-    def generateMD5Sum(self, filename):
-        file_handle = open(filename)
-        md5sum = hashlib.md5(file_handle.read()).hexdigest()
-        file_handle.close()
-        return md5sum
-
-    def fetchFileFromLibrarian(self, filename):
-        """Fetch file from librarian.
-
-        Store the contents in local path with the original filename.
-        Return the fetched filename if it was present in Librarian or None
-        if it wasn't.
-        """
-        try:
-            libraryfilealias = self.todistro.main_archive.getFileByName(
-                filename)
-        except NotFoundError:
-            return None
-
-        self.logger.info(
-            "%s: already in distro - downloading from librarian" %
-            filename)
-
-        output_file = open(filename, 'w')
-        libraryfilealias.open()
-        copy_and_close(libraryfilealias, output_file)
-        return filename
-
-    def fetchLibrarianFiles(self):
-        """Try to fetch files from Librarian.
-
-        It raises SyncSourceError if anything else then an
-        orig tarball was found in Librarian.
-        Return the names of the files retrieved from the librarian.
-        """
-        retrieved = []
-        for filename in self.files.keys():
-            if not self.fetchFileFromLibrarian(filename):
-                continue
-            file_type = determine_source_file_type(filename)
-            # set the return code if an orig was, in fact,
-            # fetched from Librarian
-            orig_types = (
-                SourcePackageFileType.ORIG_TARBALL,
-                SourcePackageFileType.COMPONENT_ORIG_TARBALL)
-            if file_type not in orig_types:
-                raise SyncSourceError(
-                    'Oops, only orig tarball can be retrieved from '
-                    'librarian.')
-            retrieved.append(filename)
-
-        return retrieved
-
-    def fetchSyncFiles(self):
-        """Fetch files from the original sync source.
-
-        Return DSC filename, which should always come via this path.
-        """
-        dsc_filename = None
-        for filename in self.files.keys():
-            file_type = determine_source_file_type(filename)
-            if file_type == SourcePackageFileType.DSC:
-                dsc_filename = filename
-            if os.path.exists(filename):
-                self.logger.info("  - <%s: cached>" % (filename))
-                continue
-            self.logger.info(
-                "  - <%s: downloading from %s>" %
-                (filename, self.origin["url"]))
-            download_f = ("%s%s" % (self.origin["url"],
-                                    self.files[filename]["remote filename"]))
-            sys.stdout.flush()
-            self.downloader(download_f, filename)
-        return dsc_filename
-
-    def checkDownloadedFiles(self):
-        """Check md5sum and size match Source.
-
-        If anything fails SyncSourceError will be raised.
-        """
-        for filename in self.files.keys():
-            actual_md5sum = self.generateMD5Sum(filename)
-            expected_md5sum = self.files[filename]["md5sum"]
-            if actual_md5sum != expected_md5sum:
-                raise SyncSourceError(
-                    "%s: md5sum check failed (%s [actual] "
-                    "vs. %s [expected])."
-                    % (filename, actual_md5sum, expected_md5sum))
-
-            actual_size = os.stat(filename)[stat.ST_SIZE]
-            expected_size = int(self.files[filename]["size"])
-            if actual_size != expected_size:
-                raise SyncSourceError(
-                    "%s: size mismatch (%s [actual] vs. %s [expected])."
-                    % (filename, actual_size, expected_size))
-
-
 class LpQueryDistro(LaunchpadScript):
     """Main class for scripts/ftpmaster-tools/lp-query-distro.py."""
 
@@ -1007,56 +866,3 @@
             # Collect extra debug messages from chroot_manager.
             for debug_message in chroot_manager._messages:
                 self.logger.debug(debug_message)
-
-
-def generate_changes(dsc, dsc_files, suite, changelog, urgency, closes,
-                     lp_closes, section, priority, description,
-                     files_from_librarian, requested_by, origin):
-    """Generate a Changes object.
-
-    :param dsc: A `Dsc` instance for the related source package.
-    :param suite: Distribution name
-    :param changelog: Relevant changelog data
-    :param urgency: Urgency string (low, medium, high, etc)
-    :param closes: Sequence of Debian bug numbers (as strings) fixed by
-        this upload.
-    :param section: Debian section
-    :param priority: Package priority
-    """
-
-    # XXX cprov 2007-07-03:
-    # Changed-By can be extracted from most-recent changelog footer,
-    # but do we care?
-
-    changes = Changes()
-    changes["Origin"] = "%s/%s" % (origin["name"], origin["suite"])
-    changes["Format"] = "1.7"
-    changes["Date"] = time.strftime("%a,  %d %b %Y %H:%M:%S %z")
-    changes["Source"] = dsc["source"]
-    changes["Binary"] = dsc["binary"]
-    changes["Architecture"] = "source"
-    changes["Version"] = dsc["version"]
-    changes["Distribution"] = suite
-    changes["Urgency"] = urgency
-    changes["Maintainer"] = dsc["maintainer"]
-    changes["Changed-By"] = requested_by
-    if description:
-        changes["Description"] = "\n %s" % description
-    if closes:
-        changes["Closes"] = " ".join(closes)
-    if lp_closes:
-        changes["Launchpad-bugs-fixed"] = " ".join(lp_closes)
-    files = []
-    for filename in dsc_files:
-        if filename in files_from_librarian:
-            continue
-        files.append({"md5sum": dsc_files[filename]["md5sum"],
-                      "size": dsc_files[filename]["size"],
-                      "section": section,
-                      "priority": priority,
-                      "name": filename,
-                     })
-
-    changes["Files"] = files
-    changes["Changes"] = "\n%s" % changelog
-    return changes

=== removed file 'lib/lp/soyuz/scripts/tests/test_sync_source.py'
--- lib/lp/soyuz/scripts/tests/test_sync_source.py	2011-12-30 01:48:17 +0000
+++ lib/lp/soyuz/scripts/tests/test_sync_source.py	1970-01-01 00:00:00 +0000
@@ -1,552 +0,0 @@
-# Copyright 2009-2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""SyncSource facilities tests."""
-
-__metaclass__ = type
-
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-from debian.deb822 import (
-    Changes,
-    Deb822Dict,
-    Dsc,
-    )
-import transaction
-from zope.component import getUtility
-
-from lp.archiveuploader.tagfiles import parse_tagfile
-from lp.registry.interfaces.distribution import IDistributionSet
-from lp.services.config import config
-from lp.services.librarianserver.testing.server import fillLibrarianFile
-from lp.services.log.logger import BufferLogger
-from lp.soyuz.scripts.ftpmaster import (
-    generate_changes,
-    SyncSource,
-    SyncSourceError,
-    )
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    LibrarianLayer,
-    )
-
-
-class TestSyncSource(TestCaseWithFactory):
-    layer = LaunchpadZopelessLayer
-    dbuser = 'ro'
-
-    def setUp(self):
-        """Create contents in disk for librarian sampledata.
-
-        Setup and chdir into a temp directory, a jail, where we can
-        control the file creation properly
-        """
-        super(TestSyncSource, self).setUp()
-        fillLibrarianFile(1, content='one')
-        fillLibrarianFile(2, content='two')
-        fillLibrarianFile(54, content='fifty-four')
-        self._home = os.path.abspath('')
-        self._jail = tempfile.mkdtemp()
-        os.chdir(self._jail)
-        self.logger = BufferLogger()
-        self.downloads = []
-
-    def tearDown(self):
-        """Remove test contents from disk.
-
-        chdir back to the previous path (home) and remove the temp
-        directory used as jail.
-        """
-        super(TestSyncSource, self).tearDown()
-        os.chdir(self._home)
-        LibrarianLayer.librarian_fixture.clear()
-        shutil.rmtree(self._jail)
-
-    def _listFiles(self):
-        """Return a list of files present in jail."""
-        return os.listdir(self._jail)
-
-    def get_messages(self):
-        """Retrieve the messages sent using the logger."""
-        return self.logger.getLogBuffer().splitlines()
-
-    def local_downloader(self, url, filename):
-        """Store download requests for future inspections."""
-        self.downloads.append((url, filename))
-        output = open(filename, 'w')
-        output.write('Slartibartfast')
-        output.close()
-
-    def _getSyncSource(self, files, origin):
-        """Return a SyncSource instance with the given parameters
-
-        Uses the local_* methods to capture results so we can verify
-        them later.
-        """
-        sync_source = SyncSource(
-            files=files, origin=origin, logger=self.logger,
-            downloader=self.local_downloader,
-            todistro=getUtility(IDistributionSet)['ubuntu'])
-        return sync_source
-
-    def testInstantiate(self):
-        """Check if SyncSource can be instantiated."""
-        files = {'foobar': {'size': 1}}
-        origin = {'foobar': {'remote-location': 'nowhere'}}
-
-        sync_source = self._getSyncSource(files, origin)
-
-        self.assertEqual(sync_source.files, files)
-        self.assertEqual(sync_source.origin, origin)
-
-        sync_source.logger.debug('opa')
-        self.assertEqual(self.get_messages(), ['DEBUG opa'])
-
-        sync_source.downloader('somewhere', 'foo')
-        self.assertEqual(self.downloads, [('somewhere', 'foo')])
-        self.assertEqual(self._listFiles(), ['foo'])
-        self.assertEqual(open('foo').read(), 'Slartibartfast')
-
-    def testCheckDownloadedFilesOK(self):
-        """Check if checkDownloadFiles really verifies the filesystem
-
-        Pass parameters via 'files' (MD5 & size) that match the file created
-        on disk.
-        """
-        files = {
-            'foo': {'md5sum': 'dd21ab16f950f7ac4f9c78ef1498eee1', 'size': 15},
-            }
-        origin = {}
-        sync_source = self._getSyncSource(files, origin)
-
-        test_file = open('foo', 'w')
-        test_file.write('abcdefghijlmnop')
-        test_file.close()
-
-        sync_source.checkDownloadedFiles()
-
-    def testCheckDownloadedFilesWrongMD5(self):
-        """Expect SyncSourceError to be raised due the wrong MD5."""
-        files = {
-            'foo': {'md5sum': 'duhhhhh', 'size': 15},
-            }
-        origin = {}
-        sync_source = self._getSyncSource(files, origin)
-
-        test_file = open('foo', 'w')
-        test_file.write('abcdefghijlmnop')
-        test_file.close()
-
-        self.assertRaises(
-            SyncSourceError,
-            sync_source.checkDownloadedFiles)
-
-    def testCheckDownloadedFilesWrongSize(self):
-        """Expect SyncSourceError to be raised due the wrong size."""
-        files = {
-            'foo': {'md5sum': 'dd21ab16f950f7ac4f9c78ef1498eee1', 'size': 10},
-            }
-        origin = {}
-        sync_source = self._getSyncSource(files, origin)
-
-        test_file = open('foo', 'w')
-        test_file.write('abcdefghijlmnop')
-        test_file.close()
-
-        self.assertRaises(
-            SyncSourceError,
-            sync_source.checkDownloadedFiles)
-
-    def testSyncSourceMD5Sum(self):
-        """Probe the classmethod provided by SyncSource."""
-        test_file = open('foo', 'w')
-        test_file.write('abcdefghijlmnop')
-        test_file.close()
-        md5 = SyncSource.generateMD5Sum('foo')
-        self.assertEqual(md5, 'dd21ab16f950f7ac4f9c78ef1498eee1')
-
-    def testFetchSyncFiles(self):
-        """Probe fetchSyncFiles.
-
-        It only downloads the files not present in current path, so the
-        test_file is skipped.
-        """
-        files = {
-            'foo_0.1.diff.gz': {'remote filename': 'xxx'},
-            'foo_0.1.dsc': {'remote filename': 'yyy'},
-            'foo_0.1.orig.gz': {'remote filename': 'zzz'},
-            }
-        origin = {'url': 'http://somewhere/'}
-
-        sync_source = self._getSyncSource(files, origin)
-
-        test_file = open('foo_0.1.diff.gz', 'w')
-        test_file.write('nahhh')
-        test_file.close()
-
-        dsc_filename = sync_source.fetchSyncFiles()
-
-        self.assertEqual(dsc_filename, 'foo_0.1.dsc')
-
-        self.assertEqual(
-            self.downloads,
-            [('http://somewhere/zzz', 'foo_0.1.orig.gz'),
-             ('http://somewhere/yyy', 'foo_0.1.dsc')])
-
-        for filename in files.keys():
-            self.assertTrue(os.path.exists(filename))
-
-    def testFetchLibrarianFilesOK(self):
-        """Probe fetchLibrarianFiles.
-
-        Seek on files published from librarian and download matching
-        filenames.
-        """
-        files = {
-            'netapplet_1.0.0.orig.tar.gz': {},
-            'netapplet_1.0.1.dsc': {},
-            'netapplet_1.0.1.diff.gz': {},
-            }
-        origin = {}
-        sync_source = self._getSyncSource(files, origin)
-
-        librarian_files = sync_source.fetchLibrarianFiles()
-
-        self.assertEqual(librarian_files, ['netapplet_1.0.0.orig.tar.gz'])
-        self.assertEqual(self._listFiles(), ['netapplet_1.0.0.orig.tar.gz'])
-        self.assertEqual(
-            self.get_messages(),
-            ['INFO netapplet_1.0.0.orig.tar.gz: already in distro '
-             '- downloading from librarian'])
-
-    def testFetchLibrarianFilesGotDuplicatedDSC(self):
-        """fetchLibrarianFiles fails for an already present version.
-
-        It raises SyncSourceError when it find a DSC or DIFF already
-        published, it means that the upload version is duplicated.
-        """
-        spr = self.factory.makeSourcePackageRelease()
-        lfa = self.factory.makeLibraryFileAlias(filename='foobar_1.0.dsc')
-        self.factory.makeSourcePackageReleaseFile(
-            sourcepackagerelease=spr, library_file=lfa)
-        self.factory.makeSourcePackagePublishingHistory(
-            archive=getUtility(IDistributionSet)['ubuntu'].main_archive,
-            sourcepackagerelease=spr)
-        transaction.commit()
-
-        files = {
-            'foobar_1.0.orig.tar.gz': {},
-            'foobar_1.0.dsc': {},
-            'foobar_1.0.diff.gz': {},
-            }
-        origin = {}
-        sync_source = self._getSyncSource(files, origin)
-
-        self.assertRaises(
-            SyncSourceError,
-            sync_source.fetchLibrarianFiles)
-
-        self.assertEqual(
-            self.get_messages(),
-            ['INFO foobar_1.0.dsc: already in distro '
-             '- downloading from librarian'])
-        self.assertEqual(self._listFiles(), ['foobar_1.0.dsc'])
-
-
-class TestSyncSourceScript(TestCase):
-    layer = LaunchpadZopelessLayer
-    dbuser = 'ro'
-
-    def setUp(self):
-        super(TestSyncSourceScript, self).setUp()
-        self._home = os.getcwd()
-        self._jail = os.path.join(
-            os.path.dirname(__file__), 'sync_source_home')
-        os.chdir(self._jail)
-
-    def tearDown(self):
-        """'chdir' back to the previous path (home)."""
-        super(TestSyncSourceScript, self).tearDown()
-        os.chdir(self._home)
-
-    def runSyncSource(self, extra_args=None):
-        """Run sync-source.py, returning the result and output.
-
-        Returns a tuple of the process's return code, stdout output and
-        stderr output.
-        """
-        if extra_args is None:
-            extra_args = []
-        script = os.path.join(
-            config.root, "scripts", "ftpmaster-tools", "sync-source.py")
-        args = [sys.executable, script]
-        args.extend(extra_args)
-        process = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        stdout, stderr = process.communicate()
-        return (process.returncode, stdout, stderr)
-
-    def testSyncSourceRunV1(self):
-        """Try a simple sync-source.py run.
-
-        It will run in a special tree prepared to cope with sync-source
-        requirements (see `setUp`). It contains a usable archive index
-        named as '$distribution_$suite_$component_Sources' and the
-        'etherwake' source files.
-
-        Check that:
-         * return code is ZERO,
-         * check standard error and standard output,
-         * check if the expected changesfile was generated,
-         * parse and inspect the changesfile using the archiveuploader
-           component (the same approach adopted by Soyuz).
-         * delete the changesfile.
-        """
-        returncode, out, err = self.runSyncSource(
-            extra_args=['-b', 'cprov', '-D', 'debian', '-C', 'main',
-                        '-S', 'incoming', 'bar'])
-
-        self.assertEqual(
-            0, returncode, "\nScript Failed:%s\nStdout:\n%s\nStderr\n%s\n"
-            % (returncode, out, err))
-
-        self.assertEqual(
-            err.splitlines(),
-            ['INFO    Creating lockfile: '
-             '/var/lock/launchpad-sync-source.lock',
-             'WARNING Could not find blacklist file on '
-             '/srv/launchpad.net/dak/sync-blacklist.txt',
-             'INFO      - <bar_1.0-1.diff.gz: cached>',
-             'INFO      - <bar_1.0.orig.tar.gz: cached>',
-             'INFO      - <bar_1.0-1.dsc: cached>',
-             ])
-        self.assertEqual(
-            out.splitlines(),
-            ['Getting binaries for hoary...',
-             '[Updating] bar (None [Ubuntu] < 1.0-1 [Debian])',
-             ' * Trying to add bar...',
-             ])
-
-        expected_changesfile = 'bar_1.0-1_source.changes'
-        self.assertTrue(
-            os.path.exists(expected_changesfile),
-            "Couldn't find %s." % expected_changesfile)
-
-        # Parse the generated unsigned changesfile.
-        parsed_changes = parse_tagfile(expected_changesfile)
-
-        # It refers to the right source/version.
-        self.assertEqual(parsed_changes['Source'], 'bar')
-        self.assertEqual(parsed_changes['Version'], '1.0-1')
-
-        # It includes the correct 'origin' and 'target' information.
-        self.assertEqual(parsed_changes['Origin'], 'Debian/incoming')
-        self.assertEqual(parsed_changes['Distribution'], 'hoary')
-
-        # 'closes' and 'launchpad-bug-fixed' are filled according to
-        # what is listed in the debian/changelog.
-        self.assertEqual(parsed_changes['Closes'], '1 2 1234 4321')
-        self.assertEqual(parsed_changes['Launchpad-bugs-fixed'], '1234 4321')
-
-        # And finally, 'maintainer' role was preserved and 'changed-by'
-        # role was assigned as specified in the sync-source command-line.
-        self.assertEqual(
-            parsed_changes['Maintainer'],
-            'Launchpad team <launchpad@xxxxxxxxxxxxxxxxxxx>')
-        self.assertEqual(
-            parsed_changes['Changed-By'],
-            'Celso Providelo <celso.providelo@xxxxxxxxxxxxx>')
-
-        os.unlink(expected_changesfile)
-
-    def testSyncSourceRunV3(self):
-        """Try a simple sync-source.py run with a version 3 source format
-        package.
-
-        It will run in a special tree prepared to cope with sync-source
-        requirements (see `setUp`). It contains a usable archive index
-        named as '$distribution_$suite_$component_Sources' and the
-        'etherwake' source files.
-
-        Check that:
-         * return code is ZERO,
-         * check standard error and standard output,
-         * check if the expected changesfile was generated,
-         * parse and inspect the changesfile using the archiveuploader
-           component (the same approach adopted by Soyuz).
-         * delete the changesfile.
-        """
-        returncode, out, err = self.runSyncSource(
-            extra_args=['-b', 'cprov', '-D', 'debian', '-C', 'main',
-                        '-S', 'incoming', 'sample1'])
-
-        self.assertEqual(
-            0, returncode, "\nScript Failed:%s\nStdout:\n%s\nStderr\n%s\n"
-            % (returncode, out, err))
-
-        self.assertEqual(
-            err.splitlines(),
-            ['INFO    Creating lockfile: '
-             '/var/lock/launchpad-sync-source.lock',
-             'WARNING Could not find blacklist file on '
-             '/srv/launchpad.net/dak/sync-blacklist.txt',
-             'INFO      - <sample1_1.0.orig-component3.tar.gz: cached>',
-             'INFO      - <sample1_1.0-1.dsc: cached>',
-             'INFO      - <sample1_1.0-1.debian.tar.gz: cached>',
-             'INFO      - <sample1_1.0.orig-component1.tar.bz2: cached>',
-             'INFO      - <sample1_1.0.orig-component2.tar.lzma: cached>',
-             'INFO      - <sample1_1.0.orig.tar.gz: cached>'])
-        self.assertEqual(
-            out.splitlines(),
-            ['Getting binaries for hoary...',
-             '[Updating] sample1 (None [Ubuntu] < 1.0-1 [Debian])',
-             ' * Trying to add sample1...',
-             ])
-
-        expected_changesfile = 'sample1_1.0-1_source.changes'
-        self.assertTrue(
-            os.path.exists(expected_changesfile),
-            "Couldn't find %s." % expected_changesfile)
-
-        # Parse the generated unsigned changesfile.
-        parsed_changes = parse_tagfile(expected_changesfile)
-
-        # It refers to the right source/version.
-        self.assertEqual(parsed_changes['Source'], 'sample1')
-        self.assertEqual(parsed_changes['Version'], '1.0-1')
-
-        # It includes the correct 'origin' and 'target' information.
-        self.assertEqual(parsed_changes['Origin'], 'Debian/incoming')
-        self.assertEqual(parsed_changes['Distribution'], 'hoary')
-
-        # And finally, 'maintainer' role was preserved and 'changed-by'
-        # role was assigned as specified in the sync-source command-line.
-        self.assertEqual(
-            parsed_changes['Maintainer'],
-            'Raphael Hertzog <hertzog@xxxxxxxxxx>')
-        self.assertEqual(
-            parsed_changes['Changed-By'],
-            'Celso Providelo <celso.providelo@xxxxxxxxxxxxx>')
-
-        os.unlink(expected_changesfile)
-
-
-class TestGenerateChanges(TestCase):
-    """Test generate_changes()."""
-
-    def getBaseDsc(self):
-        """Create a basic Dsc object for use with generate_changes()."""
-        dsc = Dsc()
-        dsc["source"] = "mysrcpkg"
-        dsc["binary"] = "mybinpkg"
-        dsc["version"] = "4.2"
-        dsc["maintainer"] = "Maintainer <maintainer@xxxxxxxxxx>"
-        return dsc
-
-    def getBaseOrigin(self):
-        """Create a basic Origin dict for use with generate_changes()."""
-        origin = Deb822Dict()
-        origin["Name"] = "Debian"
-        origin["Suite"] = "sid"
-        return origin
-
-    def generateChanges(self, dsc=None, dsc_files=None, suite="maverick",
-                        changelog=None, urgency="low", closes=None,
-                        lp_closes=None, section="net", priority="extra",
-                        description=None, files_from_librarian=[],
-                        requested_by="Somebody <somebody@xxxxxxxxxx>",
-                        origin=None):
-        if dsc is None:
-            dsc = self.getBaseDsc()
-        if dsc_files is None:
-            dsc_files = []
-        if origin is None:
-            origin = self.getBaseOrigin()
-        if changelog is None:
-            changelog = 'changelog entry'
-        return generate_changes(
-            dsc=dsc, dsc_files=dsc_files, suite=suite, changelog=changelog,
-            urgency=urgency, closes=closes, lp_closes=lp_closes,
-            section=section, priority=priority, description=description,
-            files_from_librarian=files_from_librarian,
-            requested_by=requested_by, origin=origin)
-
-    def test_minimum_fields(self):
-        # The right (minimum) set of fields are set by generate_changes().
-        changes = self.generateChanges()
-        self.assertEquals("1.7", changes["Format"])
-        self.assertEquals("mysrcpkg", changes["Source"])
-        self.assertEquals("mybinpkg", changes["Binary"])
-        self.assertEquals("source", changes["Architecture"])
-        self.assertEquals("4.2", changes["Version"])
-        self.assertEquals("maverick", changes["Distribution"])
-        self.assertEquals("low", changes["Urgency"])
-        self.assertEquals("\nchangelog entry", changes["Changes"])
-        self.assertEquals(
-            "Maintainer <maintainer@xxxxxxxxxx>", changes["Maintainer"])
-        self.assertNotIn("Description", changes)
-        self.assertNotIn("Closes", changes)
-        self.assertNotIn("Launchpad-bugs-fixed", changes)
-        self.assertEquals([], changes["Files"])
-
-    def test_closes(self):
-        # Closes gets set if any Debian bugs to close were specified.
-        changes = self.generateChanges(closes=["1232", "4323"])
-        self.assertEquals("1232 4323", changes["Closes"])
-        self.assertNotIn("Launchpad-bugs-fixed", changes)
-
-    def test_binary_newline(self):
-        # If the Dsc Binary: line contains newlines those are properly
-        # formatted in the new changes file.
-        dsc = self.getBaseDsc()
-        dsc["Binary"] = "binary1\n binary2 \n binary3"
-        changes = self.generateChanges(dsc=dsc)
-        self.assertEquals("binary1\n binary2 \n binary3", changes["Binary"])
-
-    def test_lp_closes(self):
-        # Launchpad-Bugs-Fixed gets set if any Launchpad bugs to close were
-        # specified.
-        changes = self.generateChanges(lp_closes=["987987"])
-        self.assertEquals("987987", changes["Launchpad-Bugs-Fixed"])
-
-    def test_utf8_changelog(self):
-        # A changelog entry with non-ASCII UTF-8 characters is serialized in
-        # Changes properly.
-        changes = self.generateChanges(
-            changelog="* Updated French translation by J\xc3\xa9lmer.")
-        contents = changes.dump(encoding="utf-8").encode("utf-8")
-        self.assertIn(
-            "Updated French translation by J\xc3\xa9lmer.", contents)
-
-    def test_changelog_whitelines(self):
-        # The changelog entry can contain empty lines, and this should not
-        # mess up the parsing of the changes file.
-        changelog = "* Foo\n\n\n* Bar\n.\nEntries"
-        changes = self.generateChanges(changelog=changelog)
-        contents = changes.dump(encoding="utf-8").encode("utf-8")
-        # Read contents back
-        read_changes = Changes(contents)
-        self.assertEquals("\n%s" % changelog, changes['Changes'])
-        self.assertContentEqual([
-            'Architecture',
-            'Binary',
-            'Changed-By',
-            'Changes',
-            'Date',
-            'Distribution',
-            'Files',
-            'Format',
-            'Maintainer',
-            'Origin',
-            'Source',
-            'Urgency',
-            'Version',
-            ], read_changes.keys())

=== removed file 'scripts/ftpmaster-tools/_syncorigins.py'
--- scripts/ftpmaster-tools/_syncorigins.py	2011-10-07 17:44:22 +0000
+++ scripts/ftpmaster-tools/_syncorigins.py	1970-01-01 00:00:00 +0000
@@ -1,680 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Origins dictionary containing all mirrors used for sync-source.py."""
-
-__all__ = ['origins']
-
-origins = {
-
-"debian": {
-    "name": "Debian",
-    "url": "http://ftp.debian.org/debian/";,
-    "default suite": "testing",
-    "default component": "main",
-    "dsc": "must be signed and valid"
-    },
-
-"security": {
-    "name": "Security",
-    "url": "http://security.debian.org/debian-security/";,
-    "default suite": "etch/updates",
-    "default component": "main",
-    "dsc": "must be signed and valid"
-    },
-
-"incoming": {
-    "name": "Debian",
-    "url": "http://incoming.debian.org/";,
-    "default suite": "incoming",
-    "default component": "main",
-    "dsc": "must be signed and valid"
-    },
-
-"blackdown": {
-    "name": "Blackdown",
-    "url": "http://ftp.gwdg.de/pub/languages/java/linux/debian/";,
-    "default suite": "unstable",
-    "default component": "non-free",
-    "dsc": "must be signed and valid"
-    },
-
-"marillat": {
-    "name": "Marillat",
-    "url": "ftp://ftp.nerim.net/debian-marillat/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"mythtv": {
-    "name": "MythTV",
-    "url": "http://dijkstra.csh.rit.edu/~mdz/debian/";,
-    "default suite": "unstable",
-    "default component": "mythtv",
-    "dsc": "can be unsigned"
-    },
-
-"xfce": {
-    "name": "XFCE",
-    "url": "http://www.os-works.com/debian/";,
-    "default suite": "testing",
-    "default component": "main",
-    "dsc": "must be signed and valid"
-    },
-
-####################################
-
-"apt.logreport.org-pub-debian": {
-    "name": "apt.logreport.org-pub-debian",
-    "url": "http://apt.logreport.org/pub/debian/";,
-    "default suite": "local",
-    "default component": "contrib",
-    "dsc": "can be unsigned"
-    },
-
-"apt.pgpackages.org-debian": {
-    "name": "apt.pgpackages.org-debian",
-    "url": "http://apt.pgpackages.org/debian/";,
-    "default suite": "sid",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"arda.lt-p.net-debian": {
-    "name": "arda.LT-P.net-debian",
-    "url": "http://arda.LT-P.net/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"colo.khms.westfalen.de-pakete": {
-    "name": "colo.khms.westfalen.de-Pakete",
-    "url": "http://colo.khms.westfalen.de/Pakete/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"debian.speedblue.org": {
-    "name": "debian.speedblue.org",
-    "url": "http://debian.speedblue.org/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"dl.gna.org-kazehakase": {
-    "name": "dl.gna.org-kazehakase",
-    "url": "http://dl.gna.org/kazehakase/";,
-    "default suite": "debian",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"elonen.iki.fi-code-unofficial-debs": {
-    "name": "elonen.iki.fi-code-unofficial-debs",
-    "url": "http://elonen.iki.fi/code/unofficial-debs/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"erlug.linux.it-%7eda-deb": {
-    "name": "erlug.linux.it-%7Eda-deb",
-    "url": "http://erlug.linux.it/~da/deb/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"ftp.arege.jp-debian-arege": {
-    "name": "ftp.arege.jp-debian-arege",
-    "url": "http://ftp.arege.jp/debian-arege/";,
-    "default suite": "sid",
-    "default component": "ALL",
-    "dsc": "can be unsigned"
-    },
-
-"instantafs.cbs.mpg.de-instantafs-sid": {
-    "name": "instantafs.cbs.mpg.de-instantafs-sid",
-    "url": "ftp://instantafs.cbs.mpg.de/instantafs/sid/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"jeroen.coekaerts.be-debian": {
-    "name": "jeroen.coekaerts.be-debian",
-    "url": "http://jeroen.coekaerts.be/debian/";,
-    "default suite": "unstable",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"laylward.com-debian": {
-    "name": "laylward.com-debian",
-    "url": "http://laylward.com/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"mherrn.de-debian": {
-    "name": "mherrn.de-debian",
-    "url": "http://mherrn.de/debian/";,
-    "default suite": "sid",
-    "default component": "exim",
-    "dsc": "can be unsigned"
-    },
-
-"mulk.dyndns.org-apt": {
-    "name": "mulk.dyndns.org-apt",
-    "url": "http://mulk.dyndns.org/apt/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"opensource.polytechnique.org-debian": {
-    "name": "opensource.polytechnique.org-debian",
-    "url": "http://opensource.polytechnique.org/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"people.debian.org-%7eamaya-debian": {
-    "name": "people.debian.org-%7Eamaya-debian",
-    "url": "http://people.debian.org/~amaya/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"people.debian.org-%7ecostela-debian": {
-    "name": "people.debian.org-%7Ecostela-debian",
-    "url": "http://people.debian.org/~costela/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"people.debian.org-%7ercardenes": {
-    "name": "people.debian.org-%7Ercardenes",
-    "url": "http://people.debian.org/~rcardenes/";,
-    "default suite": "sid",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"piem.homeip.net-%7epiem-debian": {
-    "name": "piem.homeip.net-%7Epiem-debian",
-    "url": "http://piem.homeip.net/~piem/debian/";,
-    "default suite": "source",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"progn.org-debian": {
-    "name": "progn.org-debian",
-    "url": "ftp://progn.org/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"ressukka.net-%7eressu-deb": {
-    "name": "ressukka.net-%7Eressu-deb",
-    "url": "http://ressukka.net/~ressu/deb/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"sadleder.de-debian": {
-    "name": "sadleder.de-debian",
-    "url": "http://sadleder.de/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"security.dsi.unimi.it-%7elorenzo-debian": {
-    "name": "security.dsi.unimi.it-%7Elorenzo-debian",
-    "url": "http://security.dsi.unimi.it/~lorenzo/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"src.braincells.com-debian": {
-    "name": "src.braincells.com-debian",
-    "url": "http://src.braincells.com/debian/";,
-    "default suite": "sid",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"themind.altervista.org-debian": {
-    "name": "themind.altervista.org-debian",
-    "url": "http://themind.altervista.org/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"www.cps-project.org-debian-unstable": {
-    "name": "www.cps-project.org-debian-unstable",
-    "url": "http://www.cps-project.org/debian/unstable/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.gwhere.org-download-debian": {
-    "name": "www.gwhere.org-download-debian",
-    "url": "http://www.gwhere.org/download/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"www.knizefamily.net-russ-software-debian": {
-    "name": "www.knizefamily.net-russ-software-debian",
-    "url": "http://www.knizefamily.net/russ/software/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.litux.org-debian": {
-    "name": "www.litux.org-debian",
-    "url": "http://www.litux.org/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.steve.org.uk-apt": {
-    "name": "www.steve.org.uk-apt",
-    "url": "http://www.steve.org.uk/apt/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.stuff.demon.co.uk-apt": {
-    "name": "www.stuff.demon.co.uk-apt",
-    "url": "http://www.stuff.demon.co.uk/apt/";,
-    "default suite": "source",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.thomas-alfeld.de-frank-download-debian": {
-    "name": "www.thomas-alfeld.de-frank-download-debian",
-    "url": "http://www.thomas-alfeld.de/frank/download/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.zero-based.org-debian": {
-    "name": "www.zero-based.org-debian",
-    "url": "http://www.zero-based.org/debian/";,
-    "default suite": "packagessource",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"kitenet.net-%7ejoey-debian": {
-    "name": "kitenet.net-%7Ejoey-debian",
-    "url": "http://kitenet.net/~joey/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.roughtrade.net-debian": {
-    "name": "www.roughtrade.net-debian",
-    "url": "http://www.roughtrade.net/debian/";,
-    "default suite": "sid",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"telepathy.freedesktop.org-debian": {
-    "name": "telepathy.freedesktop.org-debian",
-    "url": "http://telepathy.freedesktop.org/debian/";,
-    "default suite": "sid",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-####################################
-
-"ftp.mowgli.ch-pub-debian": {
-    "name": "ftp.mowgli.ch-pub-debian",
-    "url": "ftp://ftp.mowgli.ch/pub/debian/";,
-    "default suite": "sid",
-    "default component": "unofficial",
-    "dsc": "can be unsigned"
-    },
-
-"http.debian.or.jp-debian-jp": {
-    "name": "http.debian.or.jp-debian-jp",
-    "url": "http://http.debian.or.jp/debian-jp/";,
-    "default suite": "unstable-jp",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"mywebpages.comcast.net-ddamian-deb": {
-    "name": "mywebpages.comcast.net-ddamian-deb",
-    "url": "http://mywebpages.comcast.net/ddamian/deb/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"people.debian.org-%7etora-deb": {
-    "name": "people.debian.org-%7Etora-deb",
-    "url": "http://people.debian.org/~tora/deb/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"silcnet.org-download-client-deb": {
-    "name": "silcnet.org-download-client-deb",
-    "url": "http://silcnet.org/download/client/deb/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.h.shuttle.de-mitch-stuff": {
-    "name": "www.h.shuttle.de-mitch-stuff",
-    "url": "http://www.h.shuttle.de/mitch/stuff/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.assist.media.nagoya-u.ac.jp-%7ekatsu-debian": {
-    "name": "www.assist.media.nagoya-u.ac.jp-%7Ekatsu-debian",
-    "url": "http://www.assist.media.nagoya-u.ac.jp/~katsu/debian/";,
-    "default suite": "unstable",
-    "default component": "ALL",
-    "dsc": "can be unsigned"
-    },
-
-"www.stud.tu-ilmenau.de-%7ethsc-in-debian": {
-    "name": "www.stud.tu-ilmenau.de-%7Ethsc-in-debian",
-    "url": "http://www.stud.tu-ilmenau.de/~thsc-in/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"debian.hinterhof.net": {
-    "name": "debian.hinterhof.net",
-    "url": "http://debian.hinterhof.net/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"home.planet.nl-%7eautar022": {
-    "name": "home.planet.nl-%7Eautar022",
-    "url": "http://home.planet.nl/~autar022/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"dept-info.labri.fr-%7edanjean-debian": {
-    "name": "dept-info.labri.fr-%7Edanjean-debian",
-    "url": "http://dept-info.labri.fr/~danjean/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"noxa.de-%7esbeyer-debian": {
-    "name": "noxa.de-%7Esbeyer-debian",
-    "url": "http://noxa.de/~sbeyer/debian/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"debian.wgdd.de-debian": {
-    "name": "debian.wgdd.de-debian",
-    "url": "http://debian.wgdd.de/debian/";,
-    "default suite": "sid",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"luca.pca.it-debian": {
-    "name": "luca.pca.it-debian",
-    "url": "http://luca.pca.it/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.pcxperience.org-apt-debian": {
-    "name": "www.pcxperience.org-apt-debian",
-    "url": "http://www.pcxperience.org/apt/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"ftp.berlios.de-pub-gift-fasttrack": {
-    "name": "ftp.berlios.de-pub-gift-fasttrack",
-    "url": "ftp://ftp.berlios.de/pub/gift-fasttrack/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"www.webalice.it-hayarms-debian": {
-    "name": "www.webalice.it-hayarms-debian",
-    "url": "http://www.webalice.it/hayarms/debian/";,
-    "default suite": "unstable",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"users.adelphia.net-%7edavid.everly": {
-    "name": "users.adelphia.net-%7Edavid.everly",
-    "url": "http://users.adelphia.net/~david.everly/";,
-    "default suite": "emilda/sarge",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"debian.thk-systems.de-debian": {
-    "name": "debian.thk-systems.de-debian",
-    "url": "http://debian.thk-systems.de/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.adebenham.com-debian": {
-    "name": "www.adebenham.com-debian",
-    "url": "http://www.adebenham.com/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"eric.lavar.de-comp-linux-debian": {
-    "name": "eric.lavar.de-comp-linux-debian",
-    "url": "http://eric.lavar.de/comp/linux/debian/";,
-    "default suite": "experimental",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"einsteinmg.dyndns.org-debian": {
-    "name": "einsteinmg.dyndns.org-debian",
-    "url": "http://einsteinmg.dyndns.org/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.toastfreeware.priv.at-debian": {
-    "name": "www.toastfreeware.priv.at-debian",
-    "url": "http://www.toastfreeware.priv.at/debian/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.riteh.hr-%7evedranf-debian-unstable": {
-    "name": "www.riteh.hr-%7Evedranf-debian-unstable",
-    "url": "http://www.riteh.hr/~vedranf/debian_unstable/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"ftp.unixdev.net-pub-debian-udev": {
-    "name": "ftp.unixdev.net-pub-debian-udev",
-    "url": "http://ftp.unixdev.net/pub/debian-udev/";,
-    "default suite": "unixdev",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"packages.kirya.net": {
-    "name": "packages.kirya.net",
-    "url": "http://packages.kirya.net/";,
-    "default suite": "unstable",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"repos.knio.it": {
-    "name": "repos.knio.it",
-    "url": "http://repos.knio.it/";,
-    "default suite": "unstable",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"www.wakhok.ac.jp-%7efujimura-debian": {
-    "name": "www.wakhok.ac.jp-%7Efujimura-debian",
-    "url": "http://www.wakhok.ac.jp/~fujimura/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"www.eto.to-deb": {
-    "name": "www.eto.to-deb",
-    "url": "http://www.eto.to/deb/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"y-imai.good-day.net-debian": {
-    "name": "y-imai.good-day.net-debian",
-    "url": "http://y-imai.good-day.net/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"people.realnode.com-%7emnordstr": {
-    "name": "people.realnode.com-%7Emnordstr",
-    "url": "http://people.realnode.com/~mnordstr/";,
-    "default suite": "package",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"rapid.dotsrc.org": {
-    "name": "rapid.dotsrc.org",
-    "url": "http://rapid.dotsrc.org/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"debian-eclipse.wfrag.org-debian": {
-    "name": "debian-eclipse.wfrag.org-debian",
-    "url": "http://debian-eclipse.wfrag.org/debian/";,
-    "default suite": "sid",
-    "default component": "non-free",
-    "dsc": "can be unsigned"
-    },
-
-"www.stanchina.net-%7eflavio-debian": {
-    "name": "www.stanchina.net-%7Eflavio-debian",
-    "url": "http://www.stanchina.net/~flavio/debian/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"kibi.dyndns.org-packages": {
-    "name": "kibi.dyndns.org-packages",
-    "url": "http://kibi.dyndns.org/packages/";,
-    "default suite": "",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"download.gna.org-wormux-debs": {
-    "name": "download.gna.org-wormux-debs",
-    "url": "http://download.gna.org/wormux/debs/";,
-    "default suite": "dapper",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-"apt.alittletooquiet.net-staging": {
-    "name": "apt.alittletooquiet.net-staging",
-    "url": "http://apt.alittletooquiet.net/staging/";,
-    "default suite": "dapper",
-    "default component": "main",
-    "dsc": "can be unsigned"
-    },
-
-"www.debian-multimedia.org": {
-    "name": "www.debian-multimedia.org",
-    "url": "http://www.debian-multimedia.org/";,
-    "default suite": "unstable",
-    "default component": "main",
-    "dsc": "can be unsigned",
-    },
-
-"repository.maemo.org": {
-    "name": "Maemo",
-    "url": "http://repository.maemo.org/";,
-    "default suite": "bora",
-    "default component": "free",
-    "dsc": "can be unsigned",
-    },
-
-"mirror.err.no-uqm": {
-    "name": "mirror.err.no-uqm",
-    "url": "http://mirror.err.no/uqm/";,
-    "default suite": "unstable",
-    "default component": "",
-    "dsc": "can be unsigned"
-    },
-
-}

=== removed file 'scripts/ftpmaster-tools/sync-source.py'
--- scripts/ftpmaster-tools/sync-source.py	2012-01-06 11:08:30 +0000
+++ scripts/ftpmaster-tools/sync-source.py	1970-01-01 00:00:00 +0000
@@ -1,782 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# <james.troup@xxxxxxxxxxxxx>
-# pylint: disable-msg=W0403
-
-""" 'Sync' a source package by generating an upload.
-
-This is a straight port of the original dak 'josie' tool to soyuz.
-
-Long term once soyuz is monitoring other archives regularly, syncing
-will become a matter of simply 'publishing' source from Debian unstable
-wherever) into Ubuntu dapper and the whole fake upload trick can go away.
-"""
-
-import _pythonpath
-
-import errno
-import os
-import re
-import shutil
-import stat
-import string
-import tempfile
-import urllib
-
-from _syncorigins import origins
-import apt_pkg
-from debian.deb822 import Dsc
-from zope.component import getUtility
-
-from lp.archiveuploader.utils import (
-    DpkgSourceError,
-    extract_dpkg_source,
-    )
-from lp.registry.interfaces.distribution import IDistributionSet
-from lp.registry.interfaces.person import IPersonSet
-from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.database.sqlbase import (
-    cursor,
-    sqlvalues,
-    )
-from lp.services.librarian.client import LibrarianClient
-from lp.services.scripts.base import (
-    LaunchpadScript,
-    LaunchpadScriptFailure,
-    )
-from lp.soyuz.enums import (
-    PackagePublishingStatus,
-    re_bug_numbers,
-    re_closes,
-    re_lp_closes,
-    )
-from lp.soyuz.scripts.ftpmaster import (
-    generate_changes,
-    SyncSource,
-    SyncSourceError,
-    )
-
-
-reject_message = ""
-re_no_epoch = re.compile(r"^\d+\:")
-re_strip_revision = re.compile(r"-([^-]+)$")
-re_changelog_header = re.compile(
-    r"^\S+ \((?P<version>.*)\) .*;.*urgency=(?P<urgency>\w+).*")
-
-
-Blacklisted = None
-Library = None
-Log = None
-Options = None
-
-
-def md5sum_file(filename):
-    file_handle = open(filename)
-    md5sum = apt_pkg.md5sum(file_handle)
-    file_handle.close()
-    return md5sum
-
-
-def reject(str, prefix="Rejected: "):
-    global reject_message
-    if str:
-        reject_message += prefix + str + "\n"
-
-
-# Following two functions are borrowed and (modified) from apt-listchanges
-def urgency_to_numeric(u):
-    urgency_map = {
-        'low': 1,
-        'medium': 2,
-        'high': 3,
-        'emergency': 4,
-        'critical': 4,
-        }
-    return urgency_map.get(u.lower(), 1)
-
-
-def urgency_from_numeric(n):
-    urgency_map = {
-        1: 'low',
-        2: 'medium',
-        3: 'high',
-        4: 'critical',
-        }
-    return urgency_map.get(n, 'low')
-
-
-def parse_changelog(changelog_filename, previous_version):
-    if not os.path.exists(changelog_filename):
-        raise LaunchpadScriptFailure(
-            "debian/changelog not found in extracted source.")
-    urgency = urgency_to_numeric('low')
-    changes = ""
-    is_debian_changelog = 0
-    changelog_file = open(changelog_filename)
-    for line in changelog_file.readlines():
-        match = re_changelog_header.match(line)
-        if match:
-            is_debian_changelog = 1
-            if previous_version is None:
-                previous_version = "9999:9999"
-            elif apt_pkg.version_compare(
-                match.group('version'), previous_version) > 0:
-                urgency = max(
-                    urgency_to_numeric(match.group('urgency')), urgency)
-            else:
-                break
-        changes += line
-
-    if not is_debian_changelog:
-        raise LaunchpadScriptFailure("header not found in debian/changelog")
-
-    closes = []
-    for match in re_closes.finditer(changes):
-        bug_match = re_bug_numbers.findall(match.group(0))
-        closes += map(int, bug_match)
-
-    l = map(int, closes)
-    l.sort()
-    closes = map(str, l)
-
-    lp_closes = []
-    for match in re_lp_closes.finditer(changes):
-        bug_match = re_bug_numbers.findall(match.group(0))
-        lp_closes += map(int, bug_match)
-
-    l = map(int, lp_closes)
-    l.sort()
-    lp_closes = map(str, l)
-
-    return (changes, urgency_from_numeric(urgency), closes, lp_closes)
-
-
-def fix_changelog(changelog):
-    """Fix debian/changelog entries to be in .changes compatible format."""
-    fixed = []
-    fixed_idx = -1
-    for line in changelog.split("\n"):
-        if line == "":
-            fixed += [" ."]
-            fixed_idx += 1
-        elif line.startswith(" --"):
-            # Strip any 'blank' lines preceding the footer
-            while fixed[fixed_idx] == " .":
-                fixed.pop()
-                fixed_idx -= 1
-        else:
-            fixed += [" %s" % (line)]
-            fixed_idx += 1
-    # Strip trailing 'blank' lines
-    while fixed[fixed_idx] == " .":
-        fixed.pop()
-        fixed_idx -= 1
-    fixed_changelog = "\n".join(fixed)
-    fixed_changelog += "\n"
-    return fixed_changelog
-
-
-def parse_control(control_filename):
-    """Parse a debian/control file.
-
-    Extract section, priority and description if possible.
-    """
-    source_name = ""
-    source_section = "-"
-    source_priority = "-"
-    source_description = ""
-
-    if not os.path.exists(control_filename):
-        raise LaunchpadScriptFailure(
-            "debian/control not found in extracted source.")
-    control_filehandle = open(control_filename)
-    control = apt_pkg.TagFile(control_filehandle)
-    for control_section in control:
-        source = control_section.find("Source")
-        package = control_section.find("Package")
-        section = control_section.find("Section")
-        priority = control_section.find("Priority")
-        description = control_section.find("Description")
-        if source is not None:
-            if section is not None:
-                source_section = section
-            if priority is not None:
-                source_priority = priority
-            source_name = source
-        if package is not None and package == source_name:
-            source_description = (
-                "%-10s - %-.65s" % (package, description.split("\n")[0]))
-    control_filehandle.close()
-
-    return (source_section, source_priority, source_description)
-
-
-def extract_source(dsc_filename):
-    # Create and move into a temporary directory
-    tmpdir = tempfile.mkdtemp()
-    old_cwd = os.getcwd()
-
-    # Extract the source package
-    try:
-        extract_dpkg_source(dsc_filename, tmpdir)
-    except DpkgSourceError, e:
-        print " * command was '%s'" % (e.command)
-        print e.output
-        raise LaunchpadScriptFailure(
-            "'dpkg-source -x' failed for %s [return code: %s]." %
-            (dsc_filename, e.result))
-
-    os.chdir(tmpdir)
-    return (old_cwd, tmpdir)
-
-
-def cleanup_source(tmpdir, old_cwd, dsc):
-    # Sanity check that'll probably break if people set $TMPDIR, but
-    # WTH, shutil.rmtree scares me
-    if not tmpdir.startswith("/tmp/"):
-        raise LaunchpadScriptFailure(
-            "%s: tmpdir doesn't start with /tmp" % (tmpdir))
-
-    # Move back and cleanup the temporary tree
-    os.chdir(old_cwd)
-    try:
-        shutil.rmtree(tmpdir)
-    except OSError, e:
-        if errno.errorcode[e.errno] != 'EACCES':
-            raise LaunchpadScriptFailure(
-                "%s: couldn't remove tmp dir for source tree."
-                % (dsc["source"]))
-
-        reject("%s: source tree could not be cleanly removed."
-               % (dsc["source"]))
-        # We probably have u-r or u-w directories so chmod everything
-        # and try again.
-        cmd = "chmod -R u+rwx %s" % (tmpdir)
-        result = os.system(cmd)
-        if result != 0:
-            raise LaunchpadScriptFailure(
-                "'%s' failed with result %s." % (cmd, result))
-        shutil.rmtree(tmpdir)
-    except:
-        raise LaunchpadScriptFailure(
-            "%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
-
-
-def check_dsc(dsc, current_sources, current_binaries):
-    source = dsc["source"]
-    if source in current_sources:
-        source_component = current_sources[source][1]
-    else:
-        source_component = "universe"
-    for binary in map(string.strip, dsc["binary"].split(',')):
-        if binary in current_binaries:
-            (current_version, current_component) = current_binaries[binary]
-
-            # Check that a non-main source package is not trying to
-            # override a main binary package
-            if current_component == "main" and source_component != "main":
-                if not Options.forcemore:
-                    raise LaunchpadScriptFailure(
-                        "%s is in main but its source (%s) is not." %
-                        (binary, source))
-                else:
-                    Log.warning(
-                        "%s is in main but its source (%s) is not - "
-                        "continuing anyway." % (binary, source))
-
-            # Check that a source package is not trying to override an
-            # ubuntu-modified binary package
-            ubuntu_bin = current_binaries[binary][0].find("ubuntu")
-            if not Options.force and ubuntu_bin != -1:
-                raise LaunchpadScriptFailure(
-                    "%s is trying to override %s_%s without -f/--force." %
-                    (source, binary, current_version))
-            print "I: %s [%s] -> %s_%s [%s]." % (
-                source, source_component, binary, current_version,
-                current_component)
-
-
-def import_dsc(dsc_filename, suite, previous_version, signing_rules,
-               files_from_librarian, requested_by, origin, current_sources,
-               current_binaries):
-    dsc_file = open(dsc_filename, 'r')
-    dsc = Dsc(dsc_file)
-
-    if signing_rules.startswith("must be signed"):
-        dsc_file.seek(0)
-        (gpg_pre, payload, gpg_post) = Dsc.split_gpg_and_payload(dsc_file)
-        if gpg_pre == [] and gpg_post == []:
-            raise LaunchpadScriptFailure(
-                "signature required for %s but not present" % dsc_filename)
-        if signing_rules == "must be signed and valid":
-            if (gpg_pre[0] != "-----BEGIN PGP SIGNED MESSAGE-----" or
-                gpg_post[0] != "-----BEGIN PGP SIGNATURE-----"):
-                raise LaunchpadScriptFailure(
-                    "signature for %s invalid %r %r" %
-                    (dsc_filename, gpg_pre, gpg_post))
-
-    dsc_files = dict((entry['name'], entry) for entry in dsc['files'])
-    check_dsc(dsc, current_sources, current_binaries)
-
-    # Add the .dsc itself to dsc_files so it's listed in the Files: field
-    dsc_base_filename = os.path.basename(dsc_filename)
-    dsc_files.setdefault(dsc_base_filename, {})
-    dsc_files[dsc_base_filename]["md5sum"] = md5sum_file(dsc_filename)
-    dsc_files[dsc_base_filename]["size"] = os.stat(dsc_filename)[stat.ST_SIZE]
-
-    (old_cwd, tmpdir) = extract_source(dsc_filename)
-
-    # Get the upstream version
-    upstr_version = re_no_epoch.sub('', dsc["version"])
-    if re_strip_revision.search(upstr_version):
-        upstr_version = re_strip_revision.sub('', upstr_version)
-
-    # Locate the changelog file (parse_changelog checks that it exists)
-    changelog_filename = (
-        "%s-%s/debian/changelog" % (dsc["source"], upstr_version))
-
-    # Parse it and then adapt it for .changes
-    (changelog, urgency, closes, lp_closes) = parse_changelog(
-        changelog_filename, previous_version)
-    changelog = fix_changelog(changelog)
-
-    # Parse the control file
-    control_filename = "%s-%s/debian/control" % (dsc["source"], upstr_version)
-    (section, priority, description) = parse_control(control_filename)
-
-    cleanup_source(tmpdir, old_cwd, dsc)
-
-    changes = generate_changes(
-        dsc, dsc_files, suite, changelog, urgency, closes, lp_closes,
-        section, priority, description, files_from_librarian, requested_by,
-        origin)
-
-    output_filename = "%s_%s_source.changes" % (
-        dsc["source"], re_no_epoch.sub('', dsc["version"]))
-
-    filehandle = open(output_filename, 'w')
-    try:
-        changes.dump(filehandle, encoding="utf-8")
-    finally:
-        filehandle.close()
-
-
-def read_current_source(distro_series, valid_component=None, arguments=None):
-    """Return a dictionary of source packages published in 'distro_series'.
-
-    The dictionary maps each package name to a [version, component] pair.
-    'valid_component' is an optional component to restrict the search to;
-    'arguments' is the list of package names to consider when --all is not
-    given.
-    """
-    S = {}
-
-    # XXX cprov 2007-07-10: This searches all pockets of the
-    #     distro_series which is not what we want.
-    if Options.all:
-        spp = distro_series.getSourcePackagePublishing(
-            status=PackagePublishingStatus.PUBLISHED,
-            pocket=PackagePublishingPocket.RELEASE)
-    else:
-        spp = []
-        for package in arguments:
-            spp.extend(distro_series.getPublishedSources(package))
-
-    for sp in spp:
-        component = sp.component.name
-        version = sp.sourcepackagerelease.version
-        pkg = sp.sourcepackagerelease.sourcepackagename.name
-
-        if (valid_component is not None and
-            component != valid_component.name):
-            Log.warning(
-                "%s/%s: skipping because it is in %s, not %s" % (
-                pkg, version, component, valid_component.name))
-            continue
-
-        if pkg not in S:
-            S[pkg] = [version, component]
-        else:
-            if apt_pkg.version_compare(S[pkg][0], version) < 0:
-                Log.warning(
-                    "%s: replacing %s because it is < %s" % (
-                    pkg, S[pkg][0], version))
-                S[pkg] = [version, component]
-    return S
-
-
-def read_current_binaries(distro_series):
-    """Return a dictionary of binary packages published in 'distro_series'.
-
-    The dictionary maps each package name to a [version, component] pair.
-    """
-    B = {}
-
-    # XXX cprov 2007-07-10: This searches all pockets of the
-    #     distro_series which is not what we want.
-
-    # XXX James Troup 2006-02-03: this is insanely slow due to how
-    #     SQLObject works. Can be limited, but only if we know what
-    #     binaries we want to check against, which we don't know till
-    #     we have the .dsc file and currently this function is
-    #     run well before that.
-    #
-    #     for distroarchseries in distro_series.architectures:
-    #         bpp = distroarchseries.getAllReleasesByStatus(
-    #             PackagePublishingStatus.PUBLISHED)
-    #
-    #         for bp in bpp:
-    #             component = bp.component.name
-    #             version = bp.binarypackagerelease.version
-    #             pkg = bp.binarypackagerelease.binarypackagename.name
-    #
-    #             if pkg not in B:
-    #                 B[pkg] = [version, component]
-    #             else:
-    #                 if apt_pkg.version_compare(B[pkg][0], version) < 0:
-    #                     B[pkg] = [version, component]
-
-    # XXX James Troup 2006-02-22: so... let's fall back on raw SQL
-    das_ids = [das.id for das in distro_series.architectures]
-    archive_ids = [a.id for a in Options.todistro.all_distro_archives]
-    cur = cursor()
-    query = """
-    SELECT bpn.name, bpr.version, c.name
-    FROM binarypackagerelease bpr, binarypackagename bpn, component c,
-        binarypackagepublishinghistory sbpph, distroarchseries dar
-    WHERE
-        bpr.binarypackagename = bpn.id AND
-        sbpph.binarypackagerelease = bpr.id AND
-        sbpph.component = c.id AND
-        sbpph.distroarchseries = dar.id AND
-        sbpph.status = %s AND
-        sbpph.archive IN %s AND
-        dar.id IN %s
-     """ % sqlvalues(
-        PackagePublishingStatus.PUBLISHED, archive_ids, das_ids)
-    cur.execute(query)
-
-    print "Getting binaries for %s..." % (distro_series.name)
-    for (pkg, version, component) in cur.fetchall():
-        if pkg not in B:
-            B[pkg] = [version, component]
-        else:
-            if apt_pkg.version_compare(B[pkg][0], version) < 0:
-                B[pkg] = [version, component]
-    return B
-
-
-def read_Sources(filename, origin):
-    S = {}
-
-    suite = origin["suite"]
-    component = origin["component"]
-    if suite:
-        suite = "_%s" % (suite)
-    if component:
-        component = "_%s" % (component)
-
-    filename = "%s%s%s_%s" % (origin["name"], suite, component, filename)
-    sources_filehandle = open(filename)
-    sources = apt_pkg.TagFile(sources_filehandle)
-    for sources_section in sources:
-        pkg = sources_section.find("Package")
-        version = sources_section.find("Version")
-
-        if pkg in S and apt_pkg.version_compare(
-            S[pkg]["version"], version) > 0:
-            continue
-
-        S[pkg] = {}
-        S[pkg]["version"] = version
-
-        directory = sources_section.find("Directory", "")
-        files = {}
-        for line in sources_section.find("Files").split('\n'):
-            (md5sum, size, filename) = line.strip().split()
-            files[filename] = {}
-            files[filename]["md5sum"] = md5sum
-            files[filename]["size"] = int(size)
-            files[filename]["remote filename"] = (
-                os.path.join(directory, filename))
-        S[pkg]["files"] = files
-    sources_filehandle.close()
-    return S
-
-
-def add_source(pkg, Sources, previous_version, suite, requested_by, origin,
-               current_sources, current_binaries):
-    print " * Trying to add %s..." % (pkg)
-
-    # Check it's in the Sources file
-    if pkg not in Sources:
-        raise LaunchpadScriptFailure(
-            "%s doesn't exist in the Sources file." % (pkg))
-
-    syncsource = SyncSource(Sources[pkg]["files"], origin, Log,
-        urllib.urlretrieve, Options.todistro)
-    try:
-        files_from_librarian = syncsource.fetchLibrarianFiles()
-        dsc_filename = syncsource.fetchSyncFiles()
-        syncsource.checkDownloadedFiles()
-    except SyncSourceError, e:
-        raise LaunchpadScriptFailure("Fetching files failed: %s" % (str(e),))
-
-    if dsc_filename is None:
-        raise LaunchpadScriptFailure(
-            "No dsc filename in %r" % Sources[pkg]["files"].keys())
-
-    import_dsc(os.path.abspath(dsc_filename), suite, previous_version,
-               origin["dsc"], files_from_librarian, requested_by, origin,
-               current_sources, current_binaries)
-
-
-class Percentages:
-    """Helper to compute percentage ratios compared to a fixed total."""
-
-    def __init__(self, total):
-        self.total = total
-
-    def get_ratio(self, number):
-        """Report the ratio of `number` to `self.total`, as a percentage."""
-        return (float(number) / self.total) * 100
-
-
-def do_diff(Sources, Suite, origin, arguments, current_binaries):
-    stat_us = 0
-    stat_cant_update = 0
-    stat_updated = 0
-    stat_uptodate_modified = 0
-    stat_uptodate = 0
-    stat_count = 0
-    stat_broken = 0
-    stat_blacklisted = 0
-
-    if Options.all:
-        packages = Suite.keys()
-    else:
-        packages = arguments
-    packages.sort()
-    for pkg in packages:
-        stat_count += 1
-        dest_version = Suite.get(pkg, [None, ""])[0]
-
-        if pkg not in Sources:
-            if not Options.all:
-                raise LaunchpadScriptFailure("%s: not found" % (pkg))
-            else:
-                print "[Ubuntu Specific] %s_%s" % (pkg, dest_version)
-                stat_us += 1
-                continue
-
-        if pkg in Blacklisted:
-            print "[BLACKLISTED] %s_%s" % (pkg, dest_version)
-            stat_blacklisted += 1
-            continue
-
-        source_version = Sources[pkg]["version"]
-        if (dest_version is None
-                or apt_pkg.version_compare(dest_version, source_version) < 0):
-            if (dest_version is not None
-                    and (not Options.force
-                        and dest_version.find("ubuntu") != -1)):
-                stat_cant_update += 1
-                print ("[NOT Updating - Modified] %s_%s (vs %s)"
-                       % (pkg, dest_version, source_version))
-            else:
-                stat_updated += 1
-                print ("[Updating] %s (%s [Ubuntu] < %s [%s])"
-                       % (pkg, dest_version, source_version, origin["name"]))
-                if Options.action:
-                    add_source(
-                        pkg, Sources,
-                        Suite.get(pkg, ["0", ""])[0], Options.tosuite.name,
-                        Options.requestor, origin, Suite, current_binaries)
-        else:
-            if dest_version.find("ubuntu") != -1:
-                stat_uptodate_modified += 1
-                if Options.verbose or not Options.all:
-                    print ("[Nothing to update (Modified)] %s_%s (vs %s)"
-                           % (pkg, dest_version, source_version))
-            else:
-                stat_uptodate += 1
-                if Options.verbose or not Options.all:
-                    print (
-                        "[Nothing to update] %s (%s [ubuntu] >= %s [debian])"
-                        % (pkg, dest_version, source_version))
-
-    if Options.all:
-        percentages = Percentages(stat_count)
-        print
-        print ("Out-of-date BUT modified: %3d (%.2f%%)"
-            % (stat_cant_update, percentages.get_ratio(stat_cant_update)))
-        print ("Updated:                  %3d (%.2f%%)"
-            % (stat_updated, percentages.get_ratio(stat_updated)))
-        print ("Ubuntu Specific:          %3d (%.2f%%)"
-            % (stat_us, percentages.get_ratio(stat_us)))
-        print ("Up-to-date [Modified]:    %3d (%.2f%%)"
-            % (stat_uptodate_modified, percentages.get_ratio(
-                stat_uptodate_modified)))
-        print ("Up-to-date:               %3d (%.2f%%)"
-               % (stat_uptodate, percentages.get_ratio(stat_uptodate)))
-        print ("Blacklisted:              %3d (%.2f%%)"
-               % (stat_blacklisted, percentages.get_ratio(stat_blacklisted)))
-        print ("Broken:                   %3d (%.2f%%)"
-               % (stat_broken, percentages.get_ratio(stat_broken)))
-        print "                          -----------"
-        print "Total:                    %s" % (stat_count)
-
-
-def objectize_options():
-    """Post-process the parsed options.
-
-    Convert 'todistro', 'tosuite' and 'tocomponent' from strings to their
-    corresponding database objects, and normalise 'requestor'.
-    """
-    Options.todistro = getUtility(IDistributionSet)[Options.todistro]
-
-    if not Options.tosuite:
-        Options.tosuite = Options.todistro.currentseries.name
-    Options.tosuite = Options.todistro.getSeries(Options.tosuite)
-
-    valid_components = (
-        dict([(component.name, component)
-              for component in Options.tosuite.components]))
-
-    if Options.tocomponent is not None:
-
-        if Options.tocomponent not in valid_components:
-            raise LaunchpadScriptFailure(
-                "%s is not a valid component for %s/%s."
-                % (Options.tocomponent, Options.todistro.name,
-                   Options.tosuite.name))
-
-        Options.tocomponent = valid_components[Options.tocomponent]
-
-    # Fix up Options.requestor
-    if not Options.requestor:
-        Options.requestor = "katie"
-
-    PersonSet = getUtility(IPersonSet)
-    person = PersonSet.getByName(Options.requestor)
-    if not person:
-        raise LaunchpadScriptFailure(
-            "Unknown Launchpad user id '%s'." % (Options.requestor))
-    Options.requestor = "%s <%s>" % (person.displayname,
-                                     person.preferredemail.email)
-    Options.requestor = Options.requestor.encode("ascii", "replace")
-
-
-def parseBlacklist(path):
-    """Parse given file path as a 'blacklist'.
-
-    Format:
-
-    {{{
-    # [comment]
-    <sourcename> # [comment]
-    }}}
-
-    Return a blacklist dictionary where the keys are blacklisted source
-    package names.
-
-    Return an empty dictionary if the given 'path' doesn't exist.
-    """
-    blacklist = {}
-
-    try:
-        blacklist_file = open(path)
-    except IOError:
-        Log.warning('Could not find blacklist file at %s' % path)
-        return blacklist
-
-    for line in blacklist_file:
-        try:
-            line = line[:line.index("#")]
-        except ValueError:
-            pass
-        line = line.strip()
-        if not line:
-            continue
-        blacklist[line] = ""
-    blacklist_file.close()
-
-    return blacklist
-
-
-class SyncSourceScript(LaunchpadScript):
-
-    def add_my_options(self):
-        self.parser.add_option("-a", "--all", dest="all",
-                        default=False, action="store_true",
-                        help="sync all packages")
-        self.parser.add_option("-b", "--requested-by", dest="requestor",
-                        help="who the sync was requested by")
-        self.parser.add_option("-f", "--force", dest="force",
-                        default=False, action="store_true",
-                        help="force sync over the top of Ubuntu changes")
-        self.parser.add_option("-F", "--force-more", dest="forcemore",
-                        default=False, action="store_true",
-                        help="force sync even when components don't match")
-        self.parser.add_option("-n", "--noaction", dest="action",
-                        default=True, action="store_false",
-                        help="don't do anything")
-
-        # Options controlling where to sync packages to:
-        self.parser.add_option("-c", "--to-component", dest="tocomponent",
-                        help="limit syncs to packages in COMPONENT")
-        self.parser.add_option("-d", "--to-distro", dest="todistro",
-                        default='ubuntu', help="sync to DISTRO")
-        self.parser.add_option("-s", "--to-suite", dest="tosuite",
-                        help="sync to SUITE (aka distroseries)")
-
-        # Options controlling where to sync packages from:
-        self.parser.add_option("-C", "--from-component", dest="fromcomponent",
-                        help="sync from COMPONENT")
-        self.parser.add_option("-D", "--from-distro", dest="fromdistro",
-                        default='debian', help="sync from DISTRO")
-        self.parser.add_option("-S", "--from-suite", dest="fromsuite",
-                        help="sync from SUITE (aka distroseries)")
-        self.parser.add_option("-B", "--blacklist", dest="blacklist_path",
-                        default="/srv/launchpad.net/dak/sync-blacklist.txt",
-                        help="Blacklist file path.")
-
-    def main(self):
-        global Blacklisted, Library, Log, Options
-
-        Log = self.logger
-        Options = self.options
-
-        distro = Options.fromdistro.lower()
-        if not Options.fromcomponent:
-            Options.fromcomponent = origins[distro]["default component"]
-        if not Options.fromsuite:
-            Options.fromsuite = origins[distro]["default suite"]
-
-        # Sanity checks on options
-        if not Options.all and not self.args:
-            raise LaunchpadScriptFailure(
-                "Need -a/--all or at least one package name as an argument.")
-
-        apt_pkg.init()
-        Library = LibrarianClient()
-
-        objectize_options()
-
-        Blacklisted = parseBlacklist(Options.blacklist_path)
-
-        origin = origins[distro]
-        origin["suite"] = Options.fromsuite
-        origin["component"] = Options.fromcomponent
-
-        Sources = read_Sources("Sources", origin)
-        Suite = read_current_source(
-            Options.tosuite, Options.tocomponent, self.args)
-        current_binaries = read_current_binaries(Options.tosuite)
-        do_diff(Sources, Suite, origin, self.args, current_binaries)
-
-
-if __name__ == '__main__':
-    SyncSourceScript('sync-source', 'ro').lock_and_run()