← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~jtv/launchpad/pre-830890 into lp:launchpad

 

Jeroen T. Vermeulen has proposed merging lp:~jtv/launchpad/pre-830890 into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~jtv/launchpad/pre-830890/+merge/72535

= Summary =

This branch does nothing.

I'm preparing to make a change to gina, and gina needs some cleaning up.  While reading through the code, I may have picked up the odd piece of lint and swept some dirty surfaces.  There's no time for a proper cleanup right now, though.


= Launchpad lint =

Checking for conflicts and issues in changed files.

Linting changed files:
  lib/lp/soyuz/scripts/gina/packages.py
  lib/lp/soyuz/scripts/gina/archive.py
  scripts/gina.py

./scripts/gina.py
      26: '_pythonpath' imported but unused
-- 
https://code.launchpad.net/~jtv/launchpad/pre-830890/+merge/72535
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jtv/launchpad/pre-830890 into lp:launchpad.
=== modified file 'lib/lp/soyuz/scripts/gina/archive.py'
--- lib/lp/soyuz/scripts/gina/archive.py	2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/scripts/gina/archive.py	2011-08-23 08:48:28 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """Archive pool classes.
@@ -196,7 +196,7 @@
 
             # Run over the source stanzas and store info in src_map. We
             # make just one source map (instead of one per architecture)
-            # because most of then are the same for all architectures,
+            # because most of them are the same for all architectures,
             # but we go over it to also cover source packages that only
             # compile for one architecture.
             sources = apt_pkg.ParseTagFile(info_set.srcfile)
@@ -206,19 +206,19 @@
                     src_tmp['Component'] = info_set.component
                     src_name = src_tmp['Package']
                 except KeyError:
-                    log.exception("Invalid Sources stanza in %s" %
-                                  info_set.sources_tagfile)
+                    log.exception(
+                        "Invalid Sources stanza in %s",
+                        info_set.sources_tagfile)
                     continue
                 self.src_map[src_name].append(src_tmp)
 
-            # Check if it's in source-only mode, if so, skip binary index
+            # Check if it's in source-only mode.  If so, skip binary index
             # mapping.
             if info_set.source_only:
                 continue
 
-            # Create a tmp map for binaries for one arch/component pair
-            if not self.bin_map.has_key(info_set.arch):
-                self.bin_map[info_set.arch] = {}
+            # Create a tmp map for binaries for one arch/component pair.
+            self.bin_map.setdefault(info_set.arch, {})
 
             tmpbin_map = self.bin_map[info_set.arch]
 
@@ -226,12 +226,13 @@
             while binaries.Step():
                 try:
                     bin_tmp = dict(binaries.Section)
-                    # The component isn't listed in the tagfile
+                    # The component isn't listed in the tagfile.
                     bin_tmp['Component'] = info_set.component
                     bin_name = bin_tmp['Package']
                 except KeyError:
-                    log.exception("Invalid Releases stanza in %s" %
-                                  info_set.binaries_tagfile)
+                    log.exception(
+                        "Invalid Releases stanza in %s",
+                        info_set.binaries_tagfile)
                     continue
                 tmpbin_map[bin_name] = bin_tmp
 
@@ -247,4 +248,3 @@
                                   info_set.difile)
                     continue
                 tmpbin_map[dibin_name] = dibin_tmp
-

=== modified file 'lib/lp/soyuz/scripts/gina/packages.py'
--- lib/lp/soyuz/scripts/gina/packages.py	2011-02-23 20:26:53 +0000
+++ lib/lp/soyuz/scripts/gina/packages.py	2011-08-23 08:48:28 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009-2010 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # pylint: disable-msg=W0631
@@ -61,9 +61,8 @@
     "source": PackagePublishingPriority.EXTRA,
 }
 
-GPGALGOS = {}
-for item in GPGKeyAlgorithm.items:
-    GPGALGOS[item.value] = item.name
+GPGALGOS = dict((item.value, item.name) for item in GPGKeyAlgorithm.items)
+
 
 #
 # Helper functions
@@ -147,11 +146,11 @@
 
 
 def parse_person(val):
-    if "," in val:
-        # Some emails have ',' like "Adam C. Powell, IV
-        # <hazelsct@xxxxxxxxxx>". rfc822.parseaddr seems to do not
-        # handle this properly, so we munge them here
-        val = val.replace(',', '')
+    """Parse a full email address into human-readable name and address."""
+    # Some addresses have commas in them, as in: "Adam C. Powell, IV
+    # <hazelsct@xxxxxxxxxxxxxxxxxx>". rfc822.parseaddr seems not to
+    # handle this properly, so we munge them here.
+    val = val.replace(',', '')
     return rfc822.parseaddr(val)
 
 
@@ -199,6 +198,7 @@
     else:
         return None
 
+
 #
 # Exception classes
 #

=== modified file 'scripts/gina.py'
--- scripts/gina.py	2010-11-26 22:44:00 +0000
+++ scripts/gina.py	2011-08-23 08:48:28 +0000
@@ -1,6 +1,6 @@
 #!/usr/bin/python -S
 #
-# Copyright 2009,2010 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # This module uses relative imports.
@@ -19,33 +19,43 @@
 
 __metaclass__ = type
 
-import _pythonpath
-
-# Set to non-zero if you'd like to be warned every so often
-COUNTDOWN = 0
-
 import os
-import psycopg2
 import sys
 import time
 
+import _pythonpath
+import psycopg2
 from zope.component import getUtility
 
 from canonical import lp
 from canonical.config import config
 from canonical.launchpad.scripts import log
-
 from lp.services.scripts.base import LaunchpadCronScript
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.scripts.gina import ExecutionError
+from lp.soyuz.scripts.gina.archive import (
+    ArchiveComponentItems,
+    MangledArchiveError,
+    PackagesMap,
+    )
+from lp.soyuz.scripts.gina.handlers import (
+    DataSetupError,
+    ImporterHandler,
+    MultiplePackageReleaseError,
+    NoSourcePackageError,
+    )
 from lp.soyuz.scripts.gina.katie import Katie
-from lp.soyuz.scripts.gina.archive import (ArchiveComponentItems,
-    PackagesMap, MangledArchiveError)
-from lp.soyuz.scripts.gina.handlers import (ImporterHandler,
-    MultiplePackageReleaseError, NoSourcePackageError, DataSetupError)
-from lp.soyuz.scripts.gina.packages import (SourcePackageData,
-    BinaryPackageData, MissingRequiredArguments, DisplayNameDecodingError,
-    PoolFileNotFound, InvalidVersionError)
+from lp.soyuz.scripts.gina.packages import (
+    BinaryPackageData,
+    DisplayNameDecodingError,
+    InvalidVersionError,
+    MissingRequiredArguments,
+    PoolFileNotFound,
+    SourcePackageData,
+    )
+
+# Set to non-zero if you'd like to be warned every so often
+COUNTDOWN = 0
 
 
 def _get_keyring(keyrings_root):
@@ -88,34 +98,34 @@
     LIBRPORT = config.librarian.upload_port
 
     log.info("")
-    log.info("=== Processing %s/%s/%s ===" % (distro, distroseries, pocket))
-    log.debug("Packages read from: %s" % package_root)
-    log.debug("Keyrings read from: %s" % keyrings_root)
-    log.info("Components to import: %s" % ", ".join(components))
+    log.info("=== Processing %s/%s/%s ===", distro, distroseries, pocket)
+    log.debug("Packages read from: %s", package_root)
+    log.debug("Keyrings read from: %s", keyrings_root)
+    log.info("Components to import: %s", ", ".join(components))
     if component_override is not None:
-        log.info("Override components to: %s" % component_override)
-    log.info("Architectures to import: %s" % ", ".join(archs))
-    log.debug("Launchpad database: %s" % LPDB)
-    log.debug("Launchpad database host: %s" % LPDB_HOST)
-    log.debug("Launchpad database user: %s" % LPDB_USER)
-    log.info("Katie database: %s" % KTDB)
-    log.info("SourcePackage Only: %s" % source_only)
-    log.info("SourcePackageName Only: %s" % spnames_only)
-    log.debug("Librarian: %s:%s" % (LIBRHOST, LIBRPORT))
-    log.info("Dry run: %s" % (dry_run))
+        log.info("Override components to: %s", component_override)
+    log.info("Architectures to import: %s", ", ".join(archs))
+    log.debug("Launchpad database: %s", LPDB)
+    log.debug("Launchpad database host: %s", LPDB_HOST)
+    log.debug("Launchpad database user: %s", LPDB_USER)
+    log.info("Katie database: %s", KTDB)
+    log.info("SourcePackage Only: %s", source_only)
+    log.info("SourcePackageName Only: %s", spnames_only)
+    log.debug("Librarian: %s:%s", LIBRHOST, LIBRPORT)
+    log.info("Dry run: %s", dry_run)
     log.info("")
 
-    if hasattr(PackagePublishingPocket, pocket.upper()):
-        pocket = getattr(PackagePublishingPocket, pocket.upper())
-    else:
-        log.error("Could not find a pocket schema for %s" % pocket)
+    if not hasattr(PackagePublishingPocket, pocket.upper()):
+        log.error("Could not find a pocket schema for %s", pocket)
         sys.exit(1)
 
+    pocket = getattr(PackagePublishingPocket, pocket.upper())
+
     if component_override:
         valid_components = [
             component.name for component in getUtility(IComponentSet)]
         if component_override not in valid_components:
-            log.error("Could not find component %s" % component_override)
+            log.error("Could not find component %s", component_override)
             sys.exit(1)
 
     kdb = None
@@ -130,16 +140,16 @@
             source_only)
     except MangledArchiveError:
         log.exception(
-            "Failed to analyze archive for %s" % pocket_distroseries)
+            "Failed to analyze archive for %s", pocket_distroseries)
         sys.exit(1)
 
     packages_map = PackagesMap(arch_component_items)
-    importer_handler = ImporterHandler(ztm, distro, distroseries,
-                                       dry_run, kdb, package_root, keyrings,
-                                       pocket, component_override)
+    importer_handler = ImporterHandler(
+        ztm, distro, distroseries, dry_run, kdb, package_root, keyrings,
+        pocket, component_override)
 
-    import_sourcepackages(packages_map, kdb, package_root, keyrings,
-                          importer_handler)
+    import_sourcepackages(
+        packages_map, kdb, package_root, keyrings, importer_handler)
     importer_handler.commit()
 
     if source_only:
@@ -150,63 +160,63 @@
         try:
             importer_handler.ensure_archinfo(archtag)
         except DataSetupError:
-            log.exception("Database setup required for run on %s" % archtag)
+            log.exception("Database setup required for run on %s", archtag)
             sys.exit(1)
 
-    import_binarypackages(packages_map, kdb, package_root, keyrings,
-                          importer_handler)
+    import_binarypackages(
+        packages_map, kdb, package_root, keyrings, importer_handler)
     importer_handler.commit()
 
 
+def attempt_source_package_import(source, kdb, package_root, keyrings,
+                                  importer_handler):
+    """Attempt to import a source package, and handle typical errors."""
+    package_name = source.get("Package", "unknown")
+    try:
+        try:
+            do_one_sourcepackage(
+                source, kdb, package_root, keyrings, importer_handler)
+        except psycopg2.Error:
+            log.exception(
+                "Database error: unable to create SourcePackage for %s. "
+                "Retrying once..", package_name)
+            importer_handler.abort()
+            time.sleep(15)
+            do_one_sourcepackage(
+                source, kdb, package_root, keyrings, importer_handler)
+    except (
+        InvalidVersionError, MissingRequiredArguments,
+        DisplayNameDecodingError):
+        log.exception(
+            "Unable to create SourcePackageData for %s", package_name)
+    except (PoolFileNotFound, ExecutionError):
+        # Problems with katie db stuff of opening files
+        log.exception("Error processing package files for %s", package_name)
+    except psycopg2.Error:
+        log.exception(
+            "Database errors made me give up: unable to create "
+            "SourcePackage for %s", package_name)
+        importer_handler.abort()
+    except MultiplePackageReleaseError:
+        log.exception(
+            "Database duplication processing %s", package_name)
+
+
 def import_sourcepackages(packages_map, kdb, package_root,
                           keyrings, importer_handler):
     # Goes over src_map importing the sourcepackages packages.
     count = 0
     npacks = len(packages_map.src_map)
-    log.info('%i Source Packages to be imported' % npacks)
+    log.info('%i Source Packages to be imported', npacks)
 
     for list_source in sorted(
         packages_map.src_map.values(), key=lambda x: x[0].get("Package")):
         for source in list_source:
             count += 1
-            package_name = source.get("Package", "unknown")
-            try:
-                try:
-                    do_one_sourcepackage(
-                        source, kdb, package_root, keyrings, importer_handler)
-                except psycopg2.Error:
-                    log.exception(
-                        "Database error: unable to create SourcePackage "
-                        "for %s. Retrying once.." % package_name)
-                    importer_handler.abort()
-                    time.sleep(15)
-                    do_one_sourcepackage(
-                        source, kdb, package_root, keyrings, importer_handler)
-            except (
-                InvalidVersionError, MissingRequiredArguments,
-                DisplayNameDecodingError):
-                log.exception(
-                    "Unable to create SourcePackageData for %s" %
-                    package_name)
-                continue
-            except (PoolFileNotFound, ExecutionError):
-                # Problems with katie db stuff of opening files
-                log.exception(
-                    "Error processing package files for %s" % package_name)
-                continue
-            except psycopg2.Error:
-                log.exception(
-                    "Database errors made me give up: unable to create "
-                    "SourcePackage for %s" % package_name)
-                importer_handler.abort()
-                continue
-            except MultiplePackageReleaseError:
-                log.exception(
-                    "Database duplication processing %s" % package_name)
-                continue
-
-            if COUNTDOWN and count % COUNTDOWN == 0:
-                log.warn('%i/%i sourcepackages processed' % (count, npacks))
+            attempt_source_package_import(
+                source, kdb, package_root, keyrings, importer_handler)
+            if COUNTDOWN and (count % COUNTDOWN == 0):
+                log.warn('%i/%i sourcepackages processed', count, npacks)
 
 
 def do_one_sourcepackage(source, kdb, package_root, keyrings,
@@ -215,7 +225,7 @@
     if importer_handler.preimport_sourcecheck(source_data):
         # Don't bother reading package information if the source package
         # already exists in the database
-        log.info('%s already exists in the archive' % source_data.package)
+        log.info('%s already exists in the archive', source_data.package)
         return
     source_data.process_package(kdb, package_root, keyrings)
     source_data.ensure_complete(kdb)
@@ -231,8 +241,8 @@
     for archtag in packages_map.bin_map.keys():
         count = 0
         npacks = len(packages_map.bin_map[archtag])
-        log.info('%i Binary Packages to be imported for %s' %
-                 (npacks, archtag))
+        log.info(
+            '%i Binary Packages to be imported for %s', npacks, archtag)
         # Go over binarypackages importing them for this architecture
         for binary in sorted(packages_map.bin_map[archtag].values(),
                              key=lambda x: x.get("Package")):
@@ -243,44 +253,45 @@
                     do_one_binarypackage(binary, archtag, kdb, package_root,
                                          keyrings, importer_handler)
                 except psycopg2.Error:
-                    log.exception("Database errors when importing a "
-                                  "BinaryPackage for %s. Retrying once.."
-                                  % package_name)
+                    log.exception(
+                        "Database errors when importing a BinaryPackage "
+                        "for %s. Retrying once..", package_name)
                     importer_handler.abort()
                     time.sleep(15)
                     do_one_binarypackage(binary, archtag, kdb, package_root,
                                          keyrings, importer_handler)
             except (InvalidVersionError, MissingRequiredArguments):
-                log.exception("Unable to create BinaryPackageData for %s" %
-                              package_name)
+                log.exception(
+                    "Unable to create BinaryPackageData for %s", package_name)
                 continue
             except (PoolFileNotFound, ExecutionError):
                 # Problems with katie db stuff of opening files
-                log.exception("Error processing package files for %s" %
-                              package_name)
+                log.exception(
+                    "Error processing package files for %s", package_name)
                 continue
             except MultiplePackageReleaseError:
-                log.exception("Database duplication processing %s" %
-                              package_name)
+                log.exception(
+                    "Database duplication processing %s", package_name)
                 continue
             except psycopg2.Error:
-                log.exception("Database errors made me give up: unable to "
-                              "create BinaryPackage for %s" % package_name)
+                log.exception(
+                    "Database errors made me give up: unable to create "
+                    "BinaryPackage for %s", package_name)
                 importer_handler.abort()
                 continue
             except NoSourcePackageError:
-                log.exception("Failed to create Binary Package for %s" %
-                              package_name)
+                log.exception(
+                    "Failed to create Binary Package for %s", package_name)
                 nosource.append(binary)
                 continue
 
             if COUNTDOWN and count % COUNTDOWN == 0:
                 # XXX kiko 2005-10-23: untested
-                log.warn('%i/%i binary packages processed' % (count, npacks))
+                log.warn('%i/%i binary packages processed', count, npacks)
 
         if nosource:
             # XXX kiko 2005-10-23: untested
-            log.warn('%i source packages not found' % len(nosource))
+            log.warn('%i source packages not found', len(nosource))
             for pkg in nosource:
                 log.warn(pkg)
 
@@ -289,7 +300,7 @@
                          importer_handler):
     binary_data = BinaryPackageData(**binary)
     if importer_handler.preimport_binarycheck(archtag, binary_data):
-        log.info('%s already exists in the archive' % binary_data.package)
+        log.info('%s already exists in the archive', binary_data.package)
         return
     binary_data.process_package(kdb, package_root, keyrings)
     importer_handler.import_binarypackage(archtag, binary_data)