launchpad-reviewers team mailing list archive

[Merge] lp:~wgrant/launchpad/delete-old-scripts into lp:launchpad

William Grant has proposed merging lp:~wgrant/launchpad/delete-old-scripts into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~wgrant/launchpad/delete-old-scripts/+merge/75891

Remove some unused, untested, mostly broken scripts.
-- 
https://code.launchpad.net/~wgrant/launchpad/delete-old-scripts/+merge/75891
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/delete-old-scripts into lp:launchpad.
=== modified file 'lib/canonical/librarian/libraryprotocol.py'
--- lib/canonical/librarian/libraryprotocol.py	2009-06-25 05:30:52 +0000
+++ lib/canonical/librarian/libraryprotocol.py	2011-09-18 10:58:26 +0000
@@ -242,20 +242,3 @@
     protocol = FileUploadProtocol
     def __init__(self, fileLibrary):
         self.fileLibrary = fileLibrary
-
-
-if __name__ == '__main__':
-    import os, sys
-    from twisted.internet import reactor
-    log.startLogging(sys.stdout)
-    from canonical.librarian import db, storage
-    from canonical.lp import initZopeless
-    initZopeless()
-
-    try:
-        os.mkdir('/tmp/fatsam')
-    except:
-        pass
-    f = FileUploadFactory(storage.LibrarianStorage('/tmp/fatsam', db.Library()))
-    reactor.listenTCP(9090, f)
-    reactor.run()

=== modified file 'lib/lp/services/mail/sendmail.py'
--- lib/lp/services/mail/sendmail.py	2011-09-16 01:12:30 +0000
+++ lib/lp/services/mail/sendmail.py	2011-09-18 10:58:26 +0000
@@ -491,12 +491,3 @@
         return mailer.send(from_addr, to_addrs, raw_message)
     finally:
         action.finish()
-
-
-if __name__ == '__main__':
-    from canonical.lp import initZopeless
-    tm = initZopeless()
-    simple_sendmail(
-            'stuart.bishop@xxxxxxxxxxxxx', ['stuart@xxxxxxxxxxxxxxxx'],
-            'Testing Zopeless', 'This is the body')
-    tm.uninstall()

=== modified file 'lib/lp/services/scripts/tests/__init__.py'
--- lib/lp/services/scripts/tests/__init__.py	2011-08-03 00:48:40 +0000
+++ lib/lp/services/scripts/tests/__init__.py	2011-09-18 10:58:26 +0000
@@ -25,9 +25,6 @@
 KNOWN_BROKEN = [
     # Needs mysqldb module
     'scripts/migrate-bugzilla-initialcontacts.py',
-    # circular import from hell (IHasOwner).
-    'scripts/clean-sourceforge-project-entries.py',
-    'scripts/import-zope-specs.py',
     'scripts/rosetta/gettext_check_messages.py',
     # sqlobject.DatbaseIndex ?
     'scripts/linkreport.py',

=== removed file 'scripts/clean-sourceforge-project-entries.py'
--- scripts/clean-sourceforge-project-entries.py	2010-04-27 19:48:39 +0000
+++ scripts/clean-sourceforge-project-entries.py	1970-01-01 00:00:00 +0000
@@ -1,67 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-import re
-import sys
-
-import _pythonpath
-
-from zope.component import getUtility
-
-from canonical.lp import initZopeless
-from canonical.launchpad.database.product import Product
-from canonical.launchpad.scripts import execute_zcml_for_scripts
-from canonical.launchpad.interfaces.product import (
-    valid_sourceforge_project_name)
-from canonical.launchpad.webapp.interfaces import (
-    IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
-
-
-re_find_project_names = [
-    re.compile(r'(?:sou?rcefor..|sf)[.]net/projects?/([^/]+)'),
-    re.compile(r'([a-zA-Z0-9-]+)[.](?:sou?rceforge|sf)[.]net'),
-    ]
-
-
-def extract_project_name(project_name):
-    # Remove whitespace and slashes.
-    project_name = project_name.strip().strip('/')
-    if valid_sourceforge_project_name(project_name):
-        return project_name
-
-    # Try to pattern match.
-    for regex in re_find_project_names:
-        match = regex.search(project_name)
-        if match is not None:
-            if valid_sourceforge_project_name(match.group(1)):
-                return match.group(1)
-
-    # No luck.
-    return None
-
-
-def main(argv):
-    execute_zcml_for_scripts()
-    ztm = initZopeless()
-    store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
-
-    # Get all products with a sourceforgeproject.
-    products = store.find(Product,
-                          Product.sourceforgeproject != None,
-                          Product.sourceforgeproject != '')
-
-    for product in products:
-        if not valid_sourceforge_project_name(product.sourceforgeproject):
-            extracted_project_name = (
-                extract_project_name(product.sourceforgeproject))
-            print '%r ==> %r' % (
-                product.sourceforgeproject, extracted_project_name)
-            product.sourceforgeproject = extracted_project_name
-
-    ztm.commit()
-
-
-if __name__ == '__main__':
-    sys.exit(main(sys.argv))

=== removed file 'scripts/ftpmaster-tools/remove-package.py'
--- scripts/ftpmaster-tools/remove-package.py	2011-06-09 10:50:25 +0000
+++ scripts/ftpmaster-tools/remove-package.py	1970-01-01 00:00:00 +0000
@@ -1,465 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# General purpose package removal tool for ftpmaster
-
-################################################################################
-
-import _pythonpath
-
-import commands
-import optparse
-import os
-import re
-import sys
-
-import dak_utils
-
-import apt_pkg
-
-from zope.component import getUtility
-
-from canonical.config import config
-from canonical.database.constants import UTC_NOW
-from canonical.launchpad.scripts import (
-    execute_zcml_for_scripts,
-    logger,
-    logger_options,
-    )
-from canonical.lp import initZopeless
-from lp.registry.interfaces.distribution import IDistributionSet
-from lp.soyuz.enums import PackagePublishingStatus
-from lp.soyuz.model.publishing import (
-    BinaryPackagePublishingHistory,
-    SourcePackagePublishingHistory,
-    )
-
-from contrib.glock import GlobalLock
-
-################################################################################
-
-re_strip_source_version = re.compile (r'\s+.*$')
-re_build_dep_arch = re.compile(r"\[[^]]+\]")
-
-################################################################################
-
-Options = None
-Lock = None
-Log = None
-ztm = None
-
-################################################################################
-
-def game_over():
-    answer = dak_utils.our_raw_input("Continue (y/N)? ").lower()
-    if answer != "y":
-        print "Aborted."
-        sys.exit(1)
-
-################################################################################
-
-# def reverse_depends_check(removals, suites):
-#     print "Checking reverse dependencies..."
-#     components = Cnf.ValueList("Suite::%s::Components" % suites[0])
-#     dep_problem = 0
-#     p2c = {}
-#     for architecture in Cnf.ValueList("Suite::%s::Architectures" % suites[0]):
-#         if architecture in ["source", "all"]:
-#             continue
-#         deps = {}
-#         virtual_packages = {}
-#         for component in components:
-#             filename = "%s/dists/%s/%s/binary-%s/Packages.gz" \
-#                        % (Cnf["Dir::Root"], suites[0], component,
-#                           architecture)
-#             # apt_pkg.ParseTagFile needs a real file handle and can't
-#             # handle a GzipFile instance...
-#             temp_filename = dak_utils.temp_filename()
-#             (result, output) = commands.getstatusoutput("gunzip -c %s > %s" \
-#                                                         % (filename, temp_filename))
-#             if (result != 0):
-#                 dak_utils.fubar("Gunzip invocation failed!\n%s\n" \
-#                                 % (output), result)
-#             packages = open(temp_filename)
-#             Packages = apt_pkg.ParseTagFile(packages)
-#             while Packages.Step():
-#                 package = Packages.Section.Find("Package")
-#                 depends = Packages.Section.Find("Depends")
-#                 if depends:
-#                     deps[package] = depends
-#                 provides = Packages.Section.Find("Provides")
-#                 # Maintain a counter for each virtual package.  If a
-#                 # Provides: exists, set the counter to 0 and count all
-#                 # provides by a package not in the list for removal.
-#                 # If the counter stays 0 at the end, we know that only
-#                 # the to-be-removed packages provided this virtual
-#                 # package.
-#                 if provides:
-#                     for virtual_pkg in provides.split(","):
-#                         virtual_pkg = virtual_pkg.strip()
-#                         if virtual_pkg == package: continue
-#                         if not virtual_packages.has_key(virtual_pkg):
-#                             virtual_packages[virtual_pkg] = 0
-#                         if package not in removals:
-#                             virtual_packages[virtual_pkg] += 1
-#                 p2c[package] = component
-#             packages.close()
-#             os.unlink(temp_filename)
-
-#         # If a virtual package is only provided by the to-be-removed
-#         # packages, treat the virtual package as to-be-removed too.
-#         for virtual_pkg in virtual_packages.keys():
-#             if virtual_packages[virtual_pkg] == 0:
-#                 removals.append(virtual_pkg)
-
-#         # Check binary dependencies (Depends)
-#         for package in deps.keys():
-#             if package in removals: continue
-#             parsed_dep = []
-#             try:
-#                 parsed_dep += apt_pkg.ParseDepends(deps[package])
-#             except ValueError, e:
-#                 print "Error for package %s: %s" % (package, e)
-#             for dep in parsed_dep:
-#                 # Check for partial breakage.  If a package has a ORed
-#                 # dependency, there is only a dependency problem if all
-#                 # packages in the ORed depends will be removed.
-#                 unsat = 0
-#                 for dep_package, _, _ in dep:
-#                     if dep_package in removals:
-#                             unsat += 1
-#                 if unsat == len(dep):
-#                     component = p2c[package]
-#                     if component != "main":
-#                         what = "%s/%s" % (package, component)
-#                     else:
-#                         what = "** %s" % (package)
-#                     print "%s has an unsatisfied dependency on %s: %s" \
-#                           % (what, architecture, dak_utils.pp_deps(dep))
-#                     dep_problem = 1
-
-#     # Check source dependencies (Build-Depends and Build-Depends-Indep)
-#     for component in components:
-#         filename = "%s/dists/%s/%s/source/Sources.gz" \
-#                    % (Cnf["Dir::Root"], suites[0], component)
-#         # apt_pkg.ParseTagFile needs a real file handle and can't
-#         # handle a GzipFile instance...
-#         temp_filename = dak_utils.temp_filename()
-#         result, output = commands.getstatusoutput("gunzip -c %s > %s" \
-#                                                   % (filename, temp_filename))
-#         if result != 0:
-#             sys.stderr.write("Gunzip invocation failed!\n%s\n" \
-#                              % (output))
-#             sys.exit(result)
-#         sources = open(temp_filename)
-#         Sources = apt_pkg.ParseTagFile(sources)
-#         while Sources.Step():
-#             source = Sources.Section.Find("Package")
-#             if source in removals: continue
-#             parsed_dep = []
-#             for build_dep_type in ["Build-Depends", "Build-Depends-Indep"]:
-#                 build_dep = Sources.Section.get(build_dep_type)
-#                 if build_dep:
-#                     # Remove [arch] information since we want to see
-#                     # breakage on all arches
-#                     build_dep = re_build_dep_arch.sub("", build_dep)
-#                     try:
-#                         parsed_dep += apt_pkg.ParseDepends(build_dep)
-#                     except ValueError, e:
-#                         print "Error for source %s: %s" % (source, e)
-#             for dep in parsed_dep:
-#                 unsat = 0
-#                 for dep_package, _, _ in dep:
-#                     if dep_package in removals:
-#                             unsat += 1
-#                 if unsat == len(dep):
-#                     if component != "main":
-#                         source = "%s/%s" % (source, component)
-#                     else:
-#                         source = "** %s" % (source)
-#                     print "%s has an unsatisfied build-dependency: %s" \
-#                           % (source, dak_utils.pp_deps(dep))
-#                     dep_problem = 1
-#         sources.close()
-#         os.unlink(temp_filename)
-
-#     if dep_problem:
-#         print "Dependency problem found."
-#         if Options.action:
-#             game_over()
-#     else:
-#         print "No dependency problem found."
-#     print
-
-################################################################################
-
-def options_init():
-    global Options
-
-    parser = optparse.OptionParser()
-    logger_options(parser)
-    parser.add_option("-a", "--architecture", dest="architecture",
-                      help="only act on ARCHITECTURE")
-    parser.add_option("-b", "--binary", dest="binaryonly",
-                      default=False, action="store_true",
-                      help="remove binaries only")
-    parser.add_option("-c", "--component", dest="component",
-                      help="only act on COMPONENT")
-    parser.add_option("-d", "--distro", dest="distro",
-                      help="remove from DISTRO")
-    parser.add_option("-m", "--reason", dest="reason",
-                      help="reason for removal")
-    parser.add_option("-n", "--no-action", dest="action",
-                      default=True, action="store_false",
-                      help="don't do anything")
-    parser.add_option("-R", "--rdep-check", dest="rdepcheck",
-                      default=False, action="store_true",
-                      help="check reverse dependencies")
-    parser.add_option("-s", "--suite", dest="suite",
-                      help="only act on SUITE")
-    parser.add_option("-S", "--source-only", dest="sourceonly",
-                      default=False, action="store_true",
-                      help="remove source only")
-
-    (Options, arguments) = parser.parse_args()
-
-    # Sanity check options
-    if not arguments:
-        dak_utils.fubar("need at least one package name as an argument.")
-    if Options.architecture and Options.sourceonly:
-        dak_utils.fubar("can't use -a/--architecutre and -S/"
-                        "--source-only options simultaneously.")
-    if Options.binaryonly and Options.sourceonly:
-        dak_utils.fubar("can't use -b/--binary-only and -S/"
-                        "--source-only options simultaneously.")
-
-    if not Options.reason:
-        Options.reason = ""
-
-    # XXX malcc 2006-08-03: 'dak rm' used to check here whether or not we're
-    # removing from anything other than << unstable.  This never got ported
-    # to ubuntu anyway, but it might be nice someday.
-
-    # Additional architecture checks
-    # XXX James Troup 2006-01-30: parse_args.
-    if Options.architecture and 0:
-        dak_utils.warn("'source' in -a/--argument makes no sense and is ignored.")
-
-    return arguments
-
-################################################################################
-def init():
-    global Lock, Log, ztm
-
-    apt_pkg.init()
-
-    arguments = options_init()
-
-    Log = logger(Options, "remove-package")
-
-    Log.debug("Acquiring lock")
-    Lock = GlobalLock('/var/lock/launchpad-remove-package.lock')
-    Lock.acquire(blocking=True)
-
-    Log.debug("Initializing connection.")
-    execute_zcml_for_scripts()
-    ztm = initZopeless(dbuser=config.archivepublisher.dbuser)
-
-    if not Options.distro:
-        Options.distro = "ubuntu"
-    Options.distro = getUtility(IDistributionSet)[Options.distro]
-
-    if not Options.suite:
-        Options.suite = Options.distro.currentseries.name
-
-    Options.architecture = dak_utils.split_args(Options.architecture)
-    Options.component = dak_utils.split_args(Options.component)
-    Options.suite = dak_utils.split_args(Options.suite)
-
-    return arguments
-
-################################################################################
-
-def summary_to_remove(to_remove):
-    # Generate the summary of what's to be removed
-    d = {}
-    for removal in to_remove:
-        package = removal["package"]
-        version = removal["version"]
-        architecture = removal["architecture"]
-        if not d.has_key(package):
-            d[package] = {}
-        if not d[package].has_key(version):
-            d[package][version] = []
-        if architecture not in d[package][version]:
-            d[package][version].append(architecture)
-
-    summary = ""
-    removals = d.keys()
-    removals.sort()
-    for package in removals:
-        versions = d[package].keys()
-        versions.sort(apt_pkg.VersionCompare)
-        for version in versions:
-            d[package][version].sort(dak_utils.arch_compare_sw)
-            summary += "%10s | %10s | %s\n" % (package, version,
-                                               ", ".join(d[package][version]))
-
-    suites_list = dak_utils.join_with_commas_and(Options.suite);
-    print "Will remove the following packages from %s:" % (suites_list)
-    print
-    print summary
-    print
-    print "------------------- Reason -------------------"
-    print Options.reason
-    print "----------------------------------------------"
-    print
-
-    return summary
-
-################################################################################
-
-def what_to_remove(packages):
-    to_remove = []
-
-    # We have 3 modes of package selection: binary-only, source-only
-    # and source+binary.  The first two are trivial and obvious; the
-    # latter is a nasty mess, but very nice from a UI perspective so
-    # we try to support it.
-
-    for removal in packages:
-        for suite in Options.suite:
-            distro_series = Options.distro.getSeries(suite)
-
-            if Options.sourceonly:
-                bpp_list = []
-            else:
-                if Options.binaryonly:
-                    bpp_list = distro_series.getBinaryPackagePublishing(removal)
-                else:
-                    bpp_list = distro_series.getBinaryPackagePublishing(
-                        sourcename=removal)
-
-            for bpp in bpp_list:
-                package=bpp.binarypackagerelease.binarypackagename.name
-                version=bpp.binarypackagerelease.version
-                architecture=bpp.distroarchseries.architecturetag
-                if (Options.architecture and
-                    architecture not in Options.architecture):
-                    continue
-                if (Options.component and
-                    bpp.component.name not in Options.component):
-                    continue
-                d = dak_utils.Dict(
-                    type="binary", publishing=bpp, package=package,
-                    version=version, architecture=architecture)
-                to_remove.append(d)
-
-            if not Options.binaryonly:
-                for spp in distro_series.getPublishedSources(removal):
-                    package = spp.sourcepackagerelease.sourcepackagename.name
-                    version = spp.sourcepackagerelease.version
-                    if (Options.component and
-                        spp.component.name not in Options.component):
-                        continue
-                    d = dak_utils.Dict(
-                        type="source",publishing=spp, package=package,
-                        version=version, architecture="source")
-                    to_remove.append(d)
-
-    return to_remove
-
-################################################################################
-
-def do_removal(removal):
-    """Perform published package removal.
-
-    Mark provided publishing record as SUPERSEDED, such that the Domination
-    procedure will sort out its eventual removal appropriately; obeying the
-    rules for archive consistency.
-    """
-    current = removal["publishing"]
-    if removal["type"] == "binary":
-        real_current = BinaryPackagePublishingHistory.get(current.id)
-    else:
-        real_current = SourcePackagePublishingHistory.get(current.id)
-    real_current.status = PackagePublishingStatus.SUPERSEDED
-    real_current.datesuperseded = UTC_NOW
-
-################################################################################
-
-def main ():
-    packages = init()
-
-    print "Working...",
-    sys.stdout.flush()
-    to_remove = what_to_remove(packages)
-    print "done."
-
-    if not to_remove:
-        print "Nothing to do."
-        sys.exit(0)
-
-    # If we don't have a reason; spawn an editor so the user can add one
-    # Write the rejection email out as the <foo>.reason file
-    if not Options.reason and Options.action:
-        temp_filename = dak_utils.temp_filename()
-        editor = os.environ.get("EDITOR","vi")
-        result = os.system("%s %s" % (editor, temp_filename))
-        if result != 0:
-            dak_utils.fubar ("vi invocation failed for `%s'!" % (temp_filename),
-                             result)
-        temp_file = open(temp_filename)
-        for line in temp_file.readlines():
-            Options.reason += line
-        temp_file.close()
-        os.unlink(temp_filename)
-
-    summary = summary_to_remove(to_remove)
-
-    if Options.rdepcheck:
-        dak_utils.fubar("Unimplemented, sucks to be you.")
-        #reverse_depends_check(removals, suites)
-
-    # If -n/--no-action, drop out here
-    if not Options.action:
-        sys.exit(0)
-
-    print "Going to remove the packages now."
-    game_over()
-
-    whoami = dak_utils.whoami()
-    date = commands.getoutput('date -R')
-    suites_list = dak_utils.join_with_commas_and(Options.suite);
-
-    # Log first; if it all falls apart I want a record that we at least tried.
-    # XXX malcc 2006-08-03: de-hardcode me harder
-    logfile = open("/srv/launchpad.net/dak/removals.txt", 'a')
-    logfile.write("==================================="
-                  "======================================\n")
-    logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
-    logfile.write("Removed the following packages from %s:\n\n%s"
-                  % (suites_list, summary))
-    logfile.write("\n------------------- Reason -------------------\n%s\n"
-                  % (Options.reason))
-    logfile.write("----------------------------------------------\n")
-    logfile.flush()
-
-    # Do the actual deletion
-    print "Deleting...",
-    ztm.begin()
-    for removal in to_remove:
-        do_removal(removal)
-    print "done."
-    ztm.commit()
-
-    logfile.write("==================================="
-                  "======================================\n")
-    logfile.close()
-
-################################################################################
-
-if __name__ == '__main__':
-    main()

=== removed file 'scripts/import-packagenames.py'
--- scripts/import-packagenames.py	2010-11-06 12:45:26 +0000
+++ scripts/import-packagenames.py	1970-01-01 00:00:00 +0000
@@ -1,155 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# pylint: disable-msg=W0403
-import _pythonpath
-
-import sys
-import re
-from optparse import OptionParser
-
-from canonical.lp import initZopeless
-from lp.soyuz.model.binarypackagename import BinaryPackageName
-
-
-class BaseNameList:
-    """Base for Packages name list"""
-
-    def __init__(self, filename):
-        self.filename = filename
-        self.list = []
-        self._buildlist()
-        self.list.sort()
-
-    def _buildlist(self):
-        try:
-            f = open(self.filename)
-        except IOError:
-            print 'file %s not found. Exiting...' % self.filename
-            sys.exit(1)
-
-        for line in f:
-            line = self._check_format(line.strip())
-            if line:
-                if not self._valid_name(line):
-                    print ' - Invalid package name: %s' % line
-                    continue
-                self.list.append(line)
-
-    def _check_format(self, name):
-        assert isinstance(name, basestring), repr(name)
-        try:
-            # check that this is unicode data
-            name.decode("utf-8").encode("utf-8")
-            return name
-        except UnicodeError:
-            # check that this is latin-1 data
-            s = name.decode("latin-1").encode("utf-8")
-            s.decode("utf-8")
-            return s
-
-    def _valid_name(self, name):
-        pat = r"^[a-z0-9][a-z0-9\\+\\.\\-]+$"
-        if re.match(pat, name):
-            return True
-
-class SourcePackageNameList(BaseNameList):
-    """Build a sourcepackagename list from a given file"""
-
-class BinaryPackageNameList(BaseNameList):
-    """Build a binarypackagename list from a given file"""
-
-class Counter:
-    def __init__(self, interval):
-        self._count = 0
-        self.interval = interval
-
-        if not interval:
-            setattr(self, 'step', self._fake_step)
-        else:
-            setattr(self, 'step', self._step)
-
-    def _step(self):
-        self._count += 1
-        if self._count > self.interval:
-            self.reset()
-            return True
-
-    def _fake_step(self):
-        return
-
-    def reset(self):
-        self._count = 0
-
-class ProcessNames:
-    def __init__(self, source_list, binary_list, commit_interval=0):
-        self.source_list = source_list
-        self.binary_list = binary_list
-        self.ztm = initZopeless()
-        self.interval = commit_interval
-        self.count = Counter(commit_interval)
-
-    def commit(self):
-        print '\t\t@ Commiting...'
-        self.ztm.commit()
-
-
-    def processSource(self):
-        from lp.registry.model.sourcepackagename import SourcePackageName
-        if not self.source_list:
-            return
-
-        spnl = SourcePackageNameList(self.source_list).list
-
-        for name in spnl:
-            print '\t@ Evaluationg SourcePackageName %s' % name
-            SourcePackageName.ensure(name)
-            if self.count.step():
-                self.commit()
-
-        if self.interval:
-            self.commit()
-        self.count.reset()
-
-    def processBinary(self):
-        if not self.binary_list:
-            return
-
-        bpnl = BinaryPackageNameList(self.binary_list).list
-
-        for name in bpnl:
-            print '\t@ Evaluationg BinaryPackageName %s' % name
-            BinaryPackageName.ensure(name)
-            if self.count.step():
-                self.commit()
-
-        if self.interval:
-            self.commit()
-        self.count.reset()
-
-if __name__ == '__main__':
-
-    parser = OptionParser()
-
-    parser.add_option(
-        "-s", "--source-file", dest="source_file",
-        help="SourcePackageName list file")
-
-    parser.add_option(
-        "-b", "--binary-file", dest="binary_file",
-        help="BinaryPackageName list file")
-
-    parser.add_option(
-        "-c", "--commit-interval", dest="commit_interval", default=0,
-        help="DB commit interval. Default 0 performs not commit.")
-
-    (options, args) = parser.parse_args()
-
-    processor = ProcessNames(
-        options.source_file, options.binary_file,
-        int(options.commit_interval))
-
-    processor.processSource()
-    processor.processBinary()

=== removed file 'scripts/import-zope-specs.py'
--- scripts/import-zope-specs.py	2010-11-01 03:43:58 +0000
+++ scripts/import-zope-specs.py	1970-01-01 00:00:00 +0000
@@ -1,283 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# A script to import metadata about the Zope 3 specs into Launchpad
-
-__metaclass__ = type
-
-import itertools
-import re
-import sys
-import urllib2
-
-import _pythonpath
-from zope.component import getUtility
-from BeautifulSoup import BeautifulSoup
-
-from canonical.launchpad.scripts import execute_zcml_for_scripts
-from canonical.lp import initZopeless
-from lp.blueprints.enums import (
-    SpecificationStatus,
-    SpecificationGoalStatus,
-    SpecificationDelivery,
-    SpecificationPriority,
-    )
-from lp.blueprints.interfaces.specification import ISpecificationSet
-from lp.registry.interfaces.person import IPersonSet
-from lp.registry.interfaces.product import IProductSet
-
-
-WIKI_BASE = 'http://wiki.zope.org/zope3/'
-PROPOSAL_LISTS = ['Zope3Proposals', 'OldProposals', 'DraftProposals']
-specroot = WIKI_BASE + 'Zope3Proposals'
-
-at_replacements = ['_at_', '(at)', '&#64;']
-author_email_pat = re.compile('[-.A-Za-z0-9]+(?:@|%s)[-.A-Za-z0-9]+' %
-                              '|'.join([re.escape(replacement)
-                                        for replacement in at_replacements]))
-
-def getTextContent(tag):
-    if tag is None:
-        return ''
-    if isinstance(tag, basestring):
-        return tag
-    return ''.join([e for e in tag.recursiveChildGenerator()
-                    if isinstance(e, basestring)])
-
-
-class ZopeSpec:
-
-    def __init__(self, url, title, summary):
-        self.url = url
-        self.name = self.url.split('/')[-1]
-        self.title = title
-        self.summary = summary
-        self.authors = set()
-        self.statuses = set()
-
-    def parseAuthorEmails(self, text):
-        author_email_list = author_email_pat.findall(text)
-        for author in author_email_list:
-            # unmangle at symbol in email:
-            for replacement in at_replacements:
-                author = author.replace(replacement, '@')
-            self.authors.add(author)
-
-    def parseStatuses(self, soup):
-        wiki_badges = [
-            'IsWorkInProgress',
-
-            'IsProposal',
-            'IsRejectedProposal',
-            'IsSupercededProposal',
-            'IsRetractedProposal',
-            'IsAcceptedProposal',
-            'IsImplementedProposal',
-            'IsExpiredProposal',
-            'IsDraftProposal',
-
-            'IsPlanned',
-            'IsResolved',
-            'IsImplemented',
-
-            'IsReplaced',
-            'IsOutdated',
-            'IsDraft',
-            'IsEditedDraft',
-            'IsRoughDraft',
-            ]
-        for badge in wiki_badges:
-            url = WIKI_BASE + badge
-            if soup.fetch('a', {'href': url}):
-                self.statuses.add(badge)
-
-    def parseSpec(self):
-        contents = urllib2.urlopen(self.url).read()
-        soup = BeautifulSoup(contents)
-        contentdivs = soup('div', {'class': 'content'})
-        assert len(contentdivs) == 1
-        contentdiv = contentdivs[0]
-
-        # Specification statuses are represented by "wiki badges",
-        # which are just hyperlinks to particular pages.
-        self.parseStatuses(soup)
-
-        # There are two styles of spec.  One of them has a table with
-        # RFC-822 style headers in it.  The other has minor level headings
-        # with text under the heading.
-        tables = soup('table')
-        # Every page has one table, for the main page layout.  So, if the page
-        # has two tables, it means that it will be using the RFC-822 style.
-        if len(tables) >= 2:
-            # This is a spec with RFC-822 style headers.
-            docinfo = tables[1]
-            for row in docinfo('tr'):
-                if len(row('th')) < 1 or len(row('td')) < 1:
-                    continue
-                key = row('th')[0].renderContents()
-                if key.endswith(':'):
-                    key = key[:-1]
-                value = row('td')[0].renderContents()
-
-                if 'Author' in key:
-                    self.parseAuthorEmails(value)
-        else:
-            # This is a spec with minor level headings, or perhaps with no
-            # headings at all.
-
-            # Look for an author heading.
-            author_headers = soup(text=re.compile('Author.*', re.I))
-            if author_headers:
-                author = author_headers[0].findNext().renderContents()
-                self.parseAuthorEmails(author)
-
-    @property
-    def lpname(self):
-        # add dashes before capitalised words
-        name = re.sub(r'([^A-Z])([A-Z])', r'\1-\2', self.name)
-        # lower case name
-        name = name.lower()
-        # remove leading dashes
-        while name.startswith('-'):
-            name = name[1:]
-        # if name doesn't begin with an alphabetical character prefix it
-        if not name[0].isalpha():
-            name = 'x-' + name
-        return name
-
-    @property
-    def lpstatus(self):
-        # implemented and accepted specs => APPROVED
-        for status in ['IsImplemented',
-                       'IsImplementedProposal',
-                       'IsAcceptedProposal']:
-            if status in self.statuses:
-                return SpecificationStatus.APPROVED
-        # WIP => DISCUSSION
-        if 'IsWorkInProgress' in self.statuses:
-            return SpecificationStatus.DISCUSSION
-        for status in ['IsSupercededProposal', 'IsReplaced']:
-            if status in self.statuses:
-                return SpecificationStatus.SUPERSEDED
-        for status in ['IsExpiredProposal', 'IsOutdated']:
-            if status in self.statuses:
-                return SpecificationStatus.OBSOLETE
-        # draft statuses:
-        for status in ['IsDraftProposal',
-                       'IsDraft',
-                       'IsEditedDraft',
-                       'IsRoughDraft']:
-            if status in self.statuses:
-                return SpecificationStatus.DRAFT
-        # otherwise ...
-        return SpecificationStatus.PENDINGREVIEW
-
-    @property
-    def lpgoalstatus(self):
-        # implemented and accepted specs => ACCEPTED
-        for status in ['IsImplemented',
-                       'IsImplementedProposal',
-                       'IsAcceptedProposal']:
-            if status in self.statuses:
-                return SpecificationGoalStatus.ACCEPTED
-        # rejected or retracted => DECLINED
-        for status in ['IsRetractedProposal', 'IsRejectedProposal']:
-            if status in self.statuses:
-                return SpecificationGoalStatus.DECLINED
-
-        # otherwise ...
-        return SpecificationGoalStatus.PROPOSED
-
-    @property
-    def lpdelivery(self):
-        for status in ['IsImplemented',
-                       'IsImplementedProposal']:
-            if status in self.statuses:
-                return SpecificationDelivery.IMPLEMENTED
-        # otherwise ...
-        return SpecificationDelivery.UNKNOWN
-
-    def syncSpec(self):
-        zope = getUtility(IProductSet).getByName('zope')
-        zope_dev = getUtility(IPersonSet).getByName('zope-dev')
-        # has the spec been created?
-        lpspec = getUtility(ISpecificationSet).getByURL(self.url)
-        if not lpspec:
-            lpspec = getUtility(ISpecificationSet).new(
-                name=self.lpname,
-                title=self.title,
-                specurl=self.url,
-                summary=self.summary,
-                priority=SpecificationPriority.UNDEFINED,
-                status=SpecificationStatus.NEW,
-                owner=zope_dev,
-                product=zope)
-
-        # synchronise
-        lpspec.title = self.title
-        lpspec.summary = self.summary
-        lpspec.status = self.lpstatus
-        newgoalstatus = self.lpgoalstatus
-        if newgoalstatus != lpspec.goalstatus:
-            if newgoalstatus == SpecificationGoalStatus.PROPOSED:
-                lpspec.proposeGoal(None, zope_dev)
-            elif newgoalstatus == SpecificationGoalStatus.ACCEPTED:
-                lpspec.acceptBy(zope_dev)
-            elif newgoalstatus == SpecificationGoalStatus.DECLINED:
-                lpspec.declineBy(zope_dev)
-        lpspec.delivery = self.lpdelivery
-        lpspec.updateLifecycleStatus(zope_dev)
-
-        # set the assignee to the first author email with an LP account
-        for author in sorted(self.authors):
-            person = getUtility(IPersonSet).getByEmail(author)
-            if person is not None:
-                lpspec.assignee = person
-                break
-
-
-def iter_spec_urls(url=specroot):
-    contents = urllib2.urlopen(url)
-    soup = BeautifulSoup(contents)
-    contentdivs = soup('div', {'class': 'content'})
-    assert len(contentdivs) == 1
-    contentdiv = contentdivs[0]
-    listofspecs = contentdiv('ul')[0]
-
-    for listitem in listofspecs('li', recursive=False):
-        anchors = listitem('a')
-        if not anchors:
-            continue
-        specanchor = anchors[0]
-        href = specanchor['href']
-        # broken wiki link => ignore
-        if 'createform?page=' in href:
-            continue
-        title = getTextContent(specanchor)
-        summary = ''.join([getTextContent(tag)
-                               for tag in specanchor.nextSiblingGenerator()])
-        yield ZopeSpec(href, title, summary.strip())
-
-
-def main(argv):
-    execute_zcml_for_scripts()
-    ztm = initZopeless()
-
-    for spec in itertools.chain(*[iter_spec_urls(WIKI_BASE + page)
-                                  for page in PROPOSAL_LISTS]):
-        # parse extra information from the spec body
-        spec.parseSpec()
-        # add its metadata to LP
-        print 'Synchronising', spec.name
-        ztm.begin()
-        try:
-            spec.syncSpec()
-            ztm.commit()
-        except:
-            ztm.abort()
-            raise
-
-if __name__ == '__main__':
-    sys.exit(main(sys.argv))

=== removed file 'scripts/linkreport.py'
--- scripts/linkreport.py	2011-08-12 14:49:34 +0000
+++ scripts/linkreport.py	1970-01-01 00:00:00 +0000
@@ -1,223 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""
-Process LinkChecker .csv results for the staging server, stuff them into
-a database and generate a report suitable for spamming developers with.
-"""
-
-__metaclass__ = type
-
-# pylint: disable-msg=W0403
-import _pythonpath
-
-import csv, re, sys
-from StringIO import StringIO
-from optparse import OptionParser
-from sqlobject import StringCol, IntCol, BoolCol, FloatCol, DatabaseIndex
-from canonical.database.datetimecol import UtcDateTimeCol
-from canonical.database.constants import UTC_NOW
-from canonical.launchpad.scripts import db_options, logger_options, logger
-from canonical.lp import initZopeless
-from canonical.database.sqlbase import SQLBase
-from canonical.config import config
-from lp.services.mail.sendmail import simple_sendmail
-
-
-class CheckedLink(SQLBase):
-    _table = 'CheckedLink'
-    urlname = StringCol(notNull=True)
-    recursionlevel = IntCol(notNull=True)
-    parentname = StringCol(notNull=True)
-    baseref = StringCol(notNull=True)
-    result = StringCol(notNull=True)
-    resultcode = IntCol(notNull=True)
-    warningstring = StringCol(notNull=True)
-    infostring = StringCol(notNull=True)
-    valid = BoolCol(notNull=True)
-    url = StringCol(notNull=True, unique=True, alternateID=True)
-    line = IntCol(notNull=True)
-    col = IntCol(notNull=True)
-    name = StringCol(notNull=True)
-    dltime = FloatCol()
-    dlsize = IntCol()
-    checktime = FloatCol(notNull=True)
-    brokensince = UtcDateTimeCol(notNull=False, default=UTC_NOW)
-    #cached = BoolCol(notNull=True)
-
-    resultcode_index = DatabaseIndex('resultcode')
-    recursionlevel_index = DatabaseIndex('recursionlevel')
-
-
-def main(csvfile, log):
-
-    # Where we store broken links
-    broken = set()
-
-    # Suck in the csv file, updating the database and adding to the broken set
-    reader = csv.DictReader(
-            (line.replace('\0','') for line in csvfile
-                if not line.startswith('#'))
-            )
-    for row in reader:
-        # Get the result code
-        if row['valid']:
-            row['resultcode'] = 200
-            row['result'] = '200 Ok'
-        else:
-            m = re.search('^(\d+)', row['result'] or '')
-            if m is None:
-                if row['result'] == 'URL is empty':
-                    continue
-                elif 'The read operation timed out' in row['result']:
-                    row['result'] = '601 %s' % row['result']
-                    row['resultcode'] = 601
-                else:
-                    row['result'] = '602 %s' % row['result']
-                    row['resultcode'] = 602
-            else:
-                row['resultcode'] = int(m.group(1))
-
-        # Cast input and nuke crap (to avoid confusing SQLObject)
-        row['recursionlevel'] = int(row['recursionlevel'])
-        row['valid'] = row['valid'] in ('True', 'true')
-        row['line'] = int(row['line'])
-        row['col'] = int(row['column']) # Renamed - column is a SQL keyword
-        del row['column']
-        row['dltime'] = float(row['dltime'])
-        row['dlsize'] = int(row['dlsize'])
-        row['checktime'] = float(row['checktime'])
-        del row['cached']
-        if row['resultcode'] < 400:
-            row['brokensince'] = None
-
-        try:
-            link = CheckedLink.byUrl(row['url'])
-            link.set(**row)
-        except LookupError:
-            link = CheckedLink(**row)
-        broken.add(link)
-
-    total = len(broken)
-
-    # Delete any entries that were not spidered
-    # XXX StuartBishop 2005-07-04: Only if older than a threshold.
-    for link in CheckedLink.select():
-        if link in broken:
-            continue
-        link.destroySelf()
-
-    new_broken_links = CheckedLink.select("""
-        resultcode in (404, 500, 601)
-        AND brokensince > CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
-            - '1 day 12 hours'::interval
-        """, orderBy=["recursionlevel", "parentname", "url"])
-
-    rep = report("New Arrivals", new_broken_links, total, brokensince=False)
-
-    old_broken_links = CheckedLink.select("""
-        resultcode in (404, 500, 601)
-        AND brokensince <= CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
-            - '1 day 12 hours'::interval
-        AND brokensince >
-            CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - '14 days'::interval
-        """, orderBy=["recursionlevel", "parentname", "url"])
-
-    rep += report("Old Favorites", old_broken_links, total, brokensince=True)
-
-    antique_broken_links = CheckedLink.select("""
-        resultcode in (404, 500, 601)
-        AND brokensince <=
-            CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - '14 days'::interval
-        """, orderBy=["brokensince", "recursionlevel", "parentname", "url"])
-
-    rep += report(
-            "Hall of Shame", antique_broken_links, total, brokensince=True
-            )
-
-    if not options.email:
-        # Print to stdout in system encoding - might raise UnicodeError on
-        # some systems. Tough.
-        print rep
-    else:
-        # Override this setting - we are only here if email explicitly
-        # requested on the command line.
-        send_email_data = """
-            [zopeless]
-            send_email: True
-            """
-        config.push('send_email_data', send_email_data)
-        simple_sendmail(
-                "noreply@xxxxxxxxxxxxx", [options.email], options.subject,
-                rep, {'Keywords': 'LinkChecker', 'X-Fnord': 'Fnord'}
-                )
-        config.pop('send_email_data')
-
-
-def report(title, links, total, brokensince=True):
-
-    out = StringIO()
-
-    heading = "%s (%d/%d)" % (title, links.count(), total)
-    print >> out, heading
-    print >> out, "=" * len(heading)
-
-    def print_row(title, value):
-        print >> out, "%-7s: %s" % (title, str(value))
-
-    for link in links:
-        print_row("Link", link.url)
-        print_row("Parent", link.parentname)
-        print_row("Result", link.result)
-        if link.warningstring:
-            print_row("Warning", link.warningstring)
-        if brokensince:
-            print_row("Since", link.since.strftime('%A %d %B %Y'))
-        print >> out
-    print >> out
-
-    return out.getvalue()
-
-
-if __name__ == '__main__':
-    parser = OptionParser("Usage: %prog [OPTIONS] [input.csv]")
-    db_options(parser)
-    logger_options(parser)
-
-    parser.add_option(
-            "-c", "--create", action="store_true", dest="create",
-            default=False, help="Create the database tables"
-            )
-
-    parser.add_option(
-            "-s", "--subject", dest="subject", help="Email using SUBJECT",
-            metavar="SUBJECT", default="LinkChecker report"
-            )
-
-    parser.add_option(
-            "-t", "--to", dest="email", help="Email to ADDRESS",
-            metavar="ADDRESS", default=None
-            )
-
-    options, args = parser.parse_args()
-
-    log = logger(options)
-
-    if len(args) == 0 or args[0] == '-':
-        log.debug("Reading from stdin")
-        csvfile = sys.stdin
-    else:
-        csvfile = open(args[0], 'rb')
-
-    ztm = initZopeless()
-
-    if options.create:
-        # Create the table if it doesn't exist. Unfortunately, this is broken
-        # so we only create the table if requested on the command line
-        CheckedLink.createTable(ifNotExists=True)
-
-    main(csvfile, log)
-    ztm.commit()
-

=== removed file 'scripts/rosetta/check-distroseries-translations-diffs.py'
--- scripts/rosetta/check-distroseries-translations-diffs.py	2010-11-08 12:52:43 +0000
+++ scripts/rosetta/check-distroseries-translations-diffs.py	1970-01-01 00:00:00 +0000
@@ -1,94 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-import _pythonpath
-import sys
-
-from optparse import OptionParser
-from zope.component import getUtility
-
-from canonical.lp import initZopeless
-from lp.registry.interfaces.distribution import IDistributionSet
-from canonical.launchpad.scripts import execute_zcml_for_scripts
-from canonical.launchpad.scripts import logger, logger_options
-
-
-def parse_options(args):
-    """Parse a set of command line options.
-
-    Return an optparse.Values object.
-    """
-    parser = OptionParser()
-    parser.add_option("-d", "--distribution", dest="distro",
-        default='ubuntu',
-        help="The distribution we want to check.")
-    parser.add_option("-r", "--release", dest="release",
-        help="The distroseries that we want to check.")
-
-    logger_options(parser)
-
-    (options, args) = parser.parse_args(args)
-
-    return options
-
-def compare_translations(orig_distroseries, dest_distroseries):
-
-    from difflib import unified_diff
-
-    orig_templates = sorted(
-        orig_distroseries.potemplates,
-        key=lambda x: (x.name, x.sourcepackagename.name))
-    dest_templates = sorted(
-        dest_distroseries.potemplates,
-        key=lambda x: (x.name, x.sourcepackagename.name))
-
-    for i in range(len(orig_templates)):
-        old_template = orig_templates[i]
-        new_template = dest_templates[i]
-        output = '\n'.join(list(unified_diff(
-            old_template.export().split('\n'),
-            new_template.export().split('\n'))))
-        output = output.decode('UTF-8')
-        if len(output) > 0:
-            return u'%s is different than its parent %s:\n%s' % (
-                new_template.title, old_template.title, output)
-        for old_pofile in old_template.pofiles:
-            new_pofile = new_template.getPOFileByLang(
-                old_pofile.language.code)
-            old_pofile_content = old_pofile.uncachedExport(
-                    included_obsolete=False,
-                    force_utf8=True).split('\n')
-            new_pofile_content = new_pofile.uncachedExport(
-                    included_obsolete=False,
-                    force_utf8=True).split('\n')
-            output = '\n'.join(list(unified_diff(
-                old_pofile_content, new_pofile_content)))
-            output = output.decode('UTF-8')
-            if len(output) > 0:
-                return u'%s is different than its parent %s:\n%s' % (
-                    new_pofile.title, old_pofile.title, output)
-    return None
-
-def main(argv):
-    options = parse_options(argv[1:])
-
-    logger_object = logger(options, 'check')
-
-    # Setup zcml machinery to be able to use getUtility
-    execute_zcml_for_scripts()
-    ztm = initZopeless()
-
-    distribution = getUtility(IDistributionSet)[options.distro]
-    release = distribution[options.release]
-
-    logger_object.info('Starting...')
-    output = compare_translations(release.parent_series, release)
-    if output is not None:
-        logger_object.error(output)
-    logger_object.info('Done...')
-
-
-if __name__ == '__main__':
-    main(sys.argv)

=== removed file 'utilities/list-person-references.py'
--- utilities/list-person-references.py	2010-09-28 14:59:25 +0000
+++ utilities/list-person-references.py	1970-01-01 00:00:00 +0000
@@ -1,58 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009-2010 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-import _pythonpath
-
-import sys
-from canonical.lp import initZopeless
-from canonical.database import postgresql
-from canonical.database.sqlbase import cursor
-from lp.registry.model.person import Person
-from lp.registry.model.teammembership import TeamParticipation
-
-
-person_handle = sys.argv[1]
-txn = initZopeless()
-try:
-    int(person_handle)
-except ValueError:
-    if "@" in person_handle:
-        person = Person.selectOne("EmailAddress.person = Person.id AND "
-                               "emailaddress.email = %s" % person_handle)
-    else:
-        person = Person.selectOneBy(name=person_handle)
-else:
-    person = Person.selectOneBy(id=person_handle)
-
-if person is None:
-    print "Person %s not found" % person_handle
-    sys.exit(1)
-
-
-skip = []
-cur = cursor()
-references = list(postgresql.listReferences(cur, 'person', 'id'))
-
-print ("Listing references for %s (ID %s, preferred email %s):\n" %
-       (person.name, person.id,
-        person.preferredemail and person.preferredemail.email))
-for src_tab, src_col, ref_tab, ref_col, updact, delact in references:
-    if (src_tab, src_col) in skip:
-        continue
-    query = "SELECT id FROM %s WHERE %s=%s" % (src_tab, src_col, person.id)
-    cur.execute(query)
-    rows = cur.fetchall()
-    for row in rows:
-        if src_tab.lower() == 'teamparticipation':
-            tp = TeamParticipation.selectOneBy(
-                personID=person.id, teamID=person.id)
-            if tp.id == row[0]:
-                # Every person has a teamparticipation entry for itself,
-                # and we already know this. No need to output it, then.
-                continue
-        print ("\tColumn %s of table %s with id %s points to this "
-               "person." % (src_col, src_tab, row[0]))
-
-print

=== removed file 'utilities/update-revisionauthor-email.py'
--- utilities/update-revisionauthor-email.py	2010-04-27 19:48:39 +0000
+++ utilities/update-revisionauthor-email.py	1970-01-01 00:00:00 +0000
@@ -1,46 +0,0 @@
-#!/usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# Disable pylint complaining about relative import of _pythonpath
-# pylint: disable-msg=W0403
-
-"""Try to link RevionAuthors to Launchpad Person entries.
-
-Iterate through the RevisionAuthors and extract their email address.
-Then use that email address to link to a Person.
-"""
-
-import _pythonpath
-
-import email.Utils
-import sys
-
-from canonical.launchpad.scripts import execute_zcml_for_scripts
-from canonical.lp import initZopeless
-
-from canonical.launchpad.database.revision import RevisionAuthor
-
-def main(argv):
-    execute_zcml_for_scripts()
-    ztm = initZopeless()
-    try:
-        total = RevisionAuthor.select().count()
-        for number, author in enumerate(RevisionAuthor.select()):
-            if author.email is None:
-                email_address = email.Utils.parseaddr(author.name)[1]
-                # If there is no @, then it isn't a real email address.
-                if '@' in email_address:
-                    author.email = email_address
-                    if author.linkToLaunchpadPerson():
-                        print "%s linked to %s" % (
-                            author.name.encode('ascii', 'replace'),
-                            author.person.name)
-        ztm.commit()
-    finally:
-        ztm.abort()
-
-
-if __name__ == '__main__':
-    main(sys.argv)