keryx team mailing list archive

[Merge] lp:~mac9416/keryx/devel into lp:keryx/devel

 

mac9416 has proposed merging lp:~mac9416/keryx/devel into lp:keryx/devel.

Requested reviews:
    Keryx Admins (keryx-admins)

Changes:
 * Added minideblib/
 * Modified PackageListManager to use minideblib

PackageListManager is now basically functional.
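
For reference, a minimal usage sketch of the reworked class, mirroring the on_create() call in dpkg.py (the project path below is a placeholder):

    from libkeryx.definitions.dpkg import PackageListManager

    # 'local' parses the cached index files under /var/lib/apt/lists/ via minideblib
    plm = PackageListManager('local', '/path/to/project')  # placeholder project path
    while not plm.at_end():
        record = plm.get_next_record()  # one pkg_table instance per package
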
-- 
https://code.launchpad.net/~mac9416/keryx/devel/+merge/10580
Your team Keryx Development Team is subscribed to branch lp:keryx/devel.
=== modified file 'doc/README'
--- doc/README	2009-08-11 18:24:30 +0000
+++ doc/README	2009-08-23 18:39:29 +0000
@@ -5,3 +5,7 @@
 python keryx.py -c <project>
 python keryx.py -o <project> -s <package>
 python keryx.py -o <project> -S <package>
+
+Credits:
+
+Uses code adapted from minideblib by Alexandr Kanevskiy (http://www.bifh.org/wiki/python-minideblib)

=== modified file 'libkeryx/definitions/dpkg.py'
--- libkeryx/definitions/dpkg.py	2009-08-14 23:24:01 +0000
+++ libkeryx/definitions/dpkg.py	2009-08-23 18:39:29 +0000
@@ -1,3 +1,6 @@
+# Some code adapted from Alexandr Kanevskiy's <packages@xxxxxxxx> minideblib
+# (AptRepoClient.py)
+
 ''' Dpkg plugin '''
 
 __appname__  = 'dpkg'
@@ -5,12 +8,16 @@
 __supports__ = '0'
 __author__   = 'Buran Ayuthia'
 
+import logging
+logging.basicConfig(level=logging.DEBUG)
+
 from libkeryx import Definition, gen_table, queue_table
 import gzip
 import os.path
 from commands import getstatusoutput
 from sqlalchemy import *
 from sqlalchemy.orm import mapper, sessionmaker
+from minideblib import AptRepoClient
 
 class pkg_table(object):
     def __init__(self, project, package_name, version, section, \
@@ -119,8 +126,9 @@
         mapper(pkg_table, self.packages_table)
 
     def on_create(self):
-        pm = PackageManager()
-        pm.fetch_from_local(self.package_list_path)
+        plm = PackageListManager('local', self.project_folder_dir)
+        while not plm.at_end():
+            plm.get_next_record()
 
     def update_internet(self):
         query = self.session.query(gen_table)
@@ -130,106 +138,88 @@
         pm = PackageManager()
         pm.fetch_from_internet(['a', 'b'], 'lists')
 
-class PackageManager:
-    def __init__(self):
-        pass
-
-    def fetch_from_local(self, list_path):
-        print 'fetching from local'
-# No longer going to be storing the local data
-#        # Check to see if the dest_path directory exists
-#        if not os.path.isdir(dest_path):
-#            print 'pkgmgr.py: Directory \'%s\' does not exist.  Cannot copy files.' % dest_path
-#            return None
-#
-#        # Copy files if list_path directory exists
-#        if os.path.isdir(list_path):
-#            dir_list = os.listdir(list_path)
-#            for file in dir_list:
-#                test_file = list_path + '/' + file
-#                if os.path.isfile(test_file) and file != 'lock':
-#                    copy_string = 'cp -v %s %s' % (test_file, dest_path)
-#                    copy_value = getstatusoutput(copy_string)
-#                    if not copy_value:
-#                        print 'pkgmgr.py: Unable to copy %s.  ' \
-#                              'Error code= %d Error message=%s' % \
-#                              (test_file, copy_value[0], copy_value[1])
-#                        return None
-#                    else:
-#                        print file + ' copied.'
-#        else:
-#            print 'pkgmgr.py: Could not find list_path %s' % list_path
-#            return None
-
-    def fetch_from_internet(self, url_list, dest_path):
-        print 'fetch_from_internet'
-
 class PackageListManager:
 
-    def __init__(self, option):
+    def __init__(self, option, lists_dir, sources_file='/etc/apt/sources.list', package_list_dir='/var/lib/apt/lists/'):
         self.option = option
-        if self.option == 'local':
-            self.package_list_dir = '/var/lib/apt/lists/'
-            self.package_file_list = []
-            for list_file in os.listdir(self.package_list_dir):
-                if list_file.endswith('_Packages'):
-                    list_file = os.path.join(self.package_list_dir, list_file)
-                    self.package_file_list.append(list_file)
-        elif self.option == 'internet':
-            print "ERROR: Internet package loading not yet implemented."
+        self.lists_dir = lists_dir
+        self.sources_file = sources_file
+        self.package_list_dir = package_list_dir
         self.end = False
-        self.list_file = self.open_list_file()
+        self.packnames = self.load_packages(self.option)
 
-    def open_list_file(self):
-        '''Returns next list file in self.package_file_list as list of package
-           entries.
-        '''
+    def load_packages(self, option):
+        '''Loads package metadata via minideblib and returns the list of
+           available binary package names.
+        '''
-        # I add one to self.list_file_number now, then subtract one when 
-        # opening so I can return the object, not the obect in a variable.
-        return open(self.package_file_list.pop(0), 'rb').read().split('\n\n')
+        sources_file = open(self.sources_file, 'rb').read().split('\n')
+        #sources = [ "deb http://repository.maemo.org/ bora free non-free extras", "deb-src http://repository.maemo.org/ bora free non-free extras" ]
+        sources = []
+        for line in sources_file:
+            if not line.startswith('#') and not line.startswith(' ') and line != '':
+                sources.append(line)
+        self.repo_client = AptRepoClient.AptRepoClient(sources, ['i386'])
+        if option == 'local':            
+            self.repo_client.update_local(self.package_list_dir)
+        elif option == 'internet':            
+            self.repo_client.update()
+        packnames = self.repo_client.get_available_binaries()
+        return packnames
+#        for pack in packnames:
+#            version = self.repo_client.get_best_binary_version(pack)[1]
+#            packages += self.repo_client.get_binary_name_version(pack, version)
 
     def get_next_record(self):
         '''Returns the next package as an instance of pkg_table.'''
+
         pkg_fields = pkg_table
 
-        project = ''  #FIXME
-
+        # Set all values to empty strings
         (project, package_name, version, section, 
          installed_size, maintainer, original_maintainer, \
          architecture, replaces, provides, depends, recommends, \
          suggests, conflicts, filename, size, md5sum, sha256, \
          shortdesc, longdesc, homepage, bugs, origin, task) = [''] * 24
 
-        record = self.list_file.pop(0)
-
-        for line in record.split('\n'):
-            if line.startswith('Package:'): package_name = line[9:-1]
-            elif line.startswith('Version:'): version = line[9:-1]
-            elif line.startswith('Section:'): section = line[9:-1]
-            elif line.startswith('Installed Size:'): installed_size = line[16:-1]
-            elif line.startswith('Maintainer:'): maintainer = line[12:-1]
-            elif line.startswith('Original Maintainer:'): original_maintainer = line[21:-1]
-            elif line.startswith('Architecture:'): architecture = line[14:-1]
-            elif line.startswith('Replaces:'): replaces = line[10:-1]
-            elif line.startswith('Provides:'): provides = line[10:-1]
-            elif line.startswith('Depends:'): depends = line[9:-1]
-            elif line.startswith('Recommends:'): recommends = line[12:-1]
-            elif line.startswith('Suggests:'): suggests = line[10:-1]
-            elif line.startswith('Conflicts:'): conflicts = line[11:-1]
-            elif line.startswith('Filename:'): filename = line[10:-1]
-            elif line.startswith('Size:'): size = line[6:-1]
-            elif line.startswith('Description:'): shortdesc = line[13:-1]
-            elif line.startswith(' '): longdesc += ' %s' % (line[1:-1])
-            elif line.startswith('Homepage:'): homepage = line[10:-1]
-            elif line.startswith('Bugs:'): bugs = line[6:-1]
-            elif line.startswith('Origin:'): origin = line[8:-1]
-            elif line.startswith('Task:'): size = line[6:-1]
-
-        if len(self.list_file) == 0:
-            if not len(self.package_file_list) == 0:
-                self.list_file = self.open_list_file()
-            else:
-                self.end = True
+        project = ''  #FIXME
+
+        pack = self.packnames.pop(0)
+        version = self.repo_client.get_best_binary_version(pack)[1]
+        record = self.repo_client.get_binary_name_version(pack, version)
+        # Sometimes minideblib spits out an empty list
+        try:
+            record = record[0]
+        except IndexError:
+            record = {}
+
+        package_name = pack
+
+        for item in record.keys():
+            if item == 'version': version = record['version']
+            elif item == 'section': section = record['section']
+            elif item == 'installed_size': installed_size = record['installed_size']
+            elif item == 'maintainer': maintainer = record['maintainer']
+            elif item == 'original_maintainer': original_maintainer = record['original_maintainer']
+            elif item == 'architecture': architecture = record['architecture']
+            elif item == 'replaces': replaces = record['replaces']
+            elif item == 'provides': provides = record['provides']
+            elif item == 'depends': depends = record['depends']
+            elif item == 'recommends': recommends = record['recommends']
+            elif item == 'suggests': suggests = record['suggests']
+            elif item == 'conflicts': conflicts = record['conflicts']
+            elif item == 'filename': filename = record['filename']
+            elif item == 'size': size = record['size']
+            elif item == 'md5sum': md5sum = record['md5sum']
+            elif item == 'sha256': sha256 = record['sha256']
+            elif item == 'shortdesc': shortdesc = record['shortdesc']
+            elif item == 'longdesc': longdesc = record['longdesc']
+            elif item == 'homepage': homepage = record['homepage']
+            elif item == 'bugs': bugs = record['bugs']
+            elif item == 'origin': origin = record['origin']
+            elif item == 'task': task = record['task']
+
+        if len(self.packnames) == 0:
+            self.end = True
 
         return pkg_fields(project, package_name, version, section, \
                  installed_size, maintainer, original_maintainer, \
@@ -241,3 +231,4 @@
         '''Returns self.at_end indicating whether we're at the end of the lists
         '''
         return self.end
+

=== added directory 'minideblib'
=== added directory 'minideblib/.fr-NN3JMo'
=== added directory 'minideblib/.fr-jBshne'
=== added file 'minideblib/AptRepoClient.py'
--- minideblib/AptRepoClient.py	1970-01-01 00:00:00 +0000
+++ minideblib/AptRepoClient.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,618 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+#
+# AptRepoClient.py
+#
+# This module implements class for access APT repository metadata.
+#
+# Copyright (C) 2006,2007 Alexandr Kanevskiy
+#
+# Contact: Alexandr Kanevskiy <packages@xxxxxxxx>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License 
+# version 2 as published by the Free Software Foundation. 
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+#
+# $Id$
+
+__revision__ = "r"+"$Revision$"[11:-2]
+__all__ = [ 'AptRepoClient', 'AptRepoException' ]
+
+from minideblib.DpkgControl import DpkgParagraph
+from minideblib.DpkgDatalist import DpkgOrderedDatalist
+from minideblib.DpkgVersion import DpkgVersion, VersionError
+from minideblib.LoggableObject import LoggableObject
+import re, urllib2, os, types, time
+
+try:
+    set()
+except NameError:
+    from sets import Set as set
+
+
+def _universal_urlopen(url):
+    """More robust urlopen. It understands gzip transfer encoding"""
+    headers = { 'User-Agent': 'Mozilla/4.0 (compatible; Python/AptRepoClient)',
+                'Pragma': 'no-cache',
+                'Cache-Control': 'no-cache',
+                'Accept-encoding': 'gzip' }
+    request = urllib2.Request(url, None, headers)
+    usock = urllib2.urlopen(request)
+    if usock.headers.get('content-encoding', None) == 'gzip' or url.endswith(".gz"):
+        data = usock.read()
+        import cStringIO, gzip
+        data = gzip.GzipFile(fileobj = cStringIO.StringIO(data)).read()
+        return cStringIO.StringIO(data)
+    else:
+        return usock
+
+
+def _filter_base_urls(base_url, pkgcache):
+    """Return list of keys to be used in pkgcache lookup according to requested base_keys"""
+    if base_url:
+        if isinstance(base_url, types.ListType):
+            cache_keys = base_url
+        elif isinstance(base_url, types.StringType):
+            cache_keys = [ (base_url, "/", '') ]
+        elif isinstance(base_url, types.TupleType):
+            cache_keys = [base_url]
+        else:
+            # WTF!?
+            raise TypeError("Parameter base_url should be array of strings or string or tuple")
+        # Ok, we have list of keys, let's compare them
+        rkeys = set()
+        pckeys = pkgcache.keys()
+        for ckey in cache_keys:
+            if not isinstance(ckey, types.TupleType) or len(ckey) != 3:
+                raise TypeError("base_url key should be a tuple -> (url, distribution, section): %s" % str(ckey))
+            rkeys.update([akey for akey in pckeys if (ckey[0] is None or ckey[0] == akey[0]) and (ckey[1] is None or ckey[1] == akey[1]) and (ckey[2] is None or ckey[2] == akey[2])])
+        return list(rkeys) 
+    else:
+        return pkgcache.keys()
+
+
+def _get_available_pkgs(base_url, pkgcache):
+    """Returns list of package names, available in pkgcache filtered by base_url"""
+    cache_keys = _filter_base_urls(base_url, pkgcache)
+    pkg_names = set()
+    for cache_key in cache_keys:
+        pkgs = pkgcache.get(cache_key, {})
+        pkg_names.update(pkgs.keys())
+    return list(pkg_names)
+
+
+def _get_available_versions(package, base_url, pkgcache):
+    """
+        Should return a list of (base_url, package_version) tuples for all versions found in cache.
+        If base_url is not specified, all repositories will be checked
+    """
+    cache_keys = _filter_base_urls(base_url, pkgcache)
+
+    pkg_vers = [] 
+    for cache_key in cache_keys:
+        cache = pkgcache.get(cache_key, {})
+        if package in cache:
+            for pkg in cache[package]:
+                if (cache_key, pkg['version']) not in pkg_vers:
+                    pkg_vers.append((cache_key, pkg['version']))
+    return pkg_vers
+
+
+class AptRepoException(Exception):
+    """Exception generated in error situations"""
+    def __init__(self, msg):
+        Exception.__init__(self)
+        self.msg = msg
+    def __repr__(self):
+        return self.msg
+    def __str__(self):
+        return self.msg
+
+
+class AptRepoParagraph(DpkgParagraph):
+    """Like DpkgParagraph, but can return urls to packages and can return correct source package name/version for binaries"""
+    def __init__(self, fname = "", base_url = None):
+        DpkgParagraph.__init__(self, fname)
+        self.base_url = base_url
+        self.__files = None
+        self.__urls = None
+        self.__pkgid = None
+        self.__source_version = None
+
+    def __hash__(self):
+        """Make this object hashable"""
+        return hash( (self.get("package", None), self.get("version", None)) )
+
+    def set_base_url(self, base_url):
+        """Sets base url for this package. Used later to calculate relative paths"""
+        self.base_url = base_url
+        # After change the base URL, cached URLs are not valid anymore
+        self.__urls = None
+
+    def get_files(self):
+        """Return list of files in this package. Format similar to .changes files section"""
+        if self.__files:
+            return self.__files
+        try:
+            files = self['files']
+        except KeyError:
+            # Binary package ?
+            if "filename" in self:
+                self.__files = [(self['md5sum'], self['size'], None, None, self['filename'])]
+                return self.__files
+            else:
+                # Something wrong
+                return []
+
+        self.__files = []
+        lineregexp = re.compile( 
+            "^(?P<f_md5>[0-9a-f]{32})[ \t]+(?P<f_size>\d+)" +
+            "(?:[ \t]+(?P<f_section>[-/a-zA-Z0-9]+)[ \t]+(?P<f_priority>[-a-zA-Z0-9]+))?" +
+            "[ \t]+(?P<f_name>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$")
+    
+        for line in files:
+            if line == '':
+                continue
+            match = lineregexp.match(line)
+            if (match is None):
+                raise AptRepoException("Couldn't parse file entry \"%s\" in Files field of .changes" % (line,))
+            else:
+                self.__files.append((match.group("f_md5"), match.group("f_size"), match.group("f_section"), match.group("f_priority"), match.group("f_name")))
+        return self.__files
+
+    def get_pkgid(self):
+        """Return pkg id for this package. For binaries it's MD5 sum of file, for sources MD5 sum of .dsc"""
+        if self.__pkgid:
+            return self.__pkgid
+        try:
+            files = self['files']
+        except KeyError:
+            # Binary package ?
+            if "md5sum" in self:
+                self.__pkgid = self['md5sum']
+                return self.__pkgid
+            else:
+                # Something wrong
+                raise AptRepoException("Binary package, but MD5Sum not defined")
+
+        lineregexp = re.compile( 
+            "^(?P<f_md5>[0-9a-f]{32})[ \t]+(?P<f_size>\d+)" +
+            "(?:[ \t]+(?P<f_section>[-/a-zA-Z0-9]+)[ \t]+(?P<f_priority>[-a-zA-Z0-9]+))?" +
+            "[ \t]+(?P<f_name>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$")
+    
+        for line in files:
+            if line == '':
+                continue
+            match = lineregexp.match(line)
+            if (match is None):
+                raise AptRepoException("Couldn't parse file entry \"%s\" in Files field of .changes" % (line,))
+            else:
+                if match.group("f_name").endswith(".dsc"):
+                    self.__pkgid = match.group("f_md5")
+                    return self.__pkgid
+        raise AptRepoException("No DSC file found in source package")
+
+    def get_urls(self):
+        """Return array of URLs to package files"""
+
+        if self.__urls:
+            return self.__urls
+        if "filename" in self:
+            self.__urls = [os.path.join(self.base_url, self['filename'])]
+            return self.__urls
+        if "files" in self:
+            self.__urls = []
+            for elems in self.get_files():
+                self.__urls.append(os.path.join(self.base_url, self['directory'], elems[4]))
+            return self.__urls
+
+    def get_source(self):
+        """ Return tuple (name, version) for sources of this package """
+        if self.__source_version:
+            return self.__source_version
+        if "files" in self:
+            # It's source itself, stupid people
+            self.__source_version = (self['package'], self['version'])
+        # Ok, it's binary. Let's analyze some situations
+        elif "source" not in self:
+            # source name the same as package
+            self.__source_version = (self['package'], self['version'])
+        else:
+            # Source: tag present. Let's deal with it
+            match = re.search(r"(?P<name>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)(\s+\((?P<ver>(?:[0-9]+:)?[a-zA-Z0-9.+-]+)\))?", self['source'])
+            if not match.group("ver"):
+                self.__source_version = (match.group("name"), self['version'])
+            else:
+                # mostly braindead packagers
+                self.__source_version = (match.group("name"), match.group("ver"))
+        if self.__source_version:
+            return self.__source_version
+        else:
+            raise AptRepoException("Something strange. We can't identify source version")
+
+
+class AptRepoMetadataBase(DpkgOrderedDatalist):
+    def __init__(self, base_url = None, case_sensitive = 0, allowed_arches = None):
+        DpkgOrderedDatalist.__init__(self)
+        self.key = "package"
+        self.case_sensitive = case_sensitive
+        self.base_url = base_url
+        self.allowed_arches = allowed_arches
+
+    def setkey(self, key):
+        self.key = key
+
+    def set_case_sensitive(self, value):
+        self.case_sensitive = value
+
+    def __load_one(self, in_file, base_url):
+        """Load meta-information for one package"""
+        para = AptRepoParagraph(None, base_url = base_url)
+        para.setCaseSensitive(self.case_sensitive)
+        para.load( in_file )
+        return para
+
+    def load(self, inf, base_url = None):
+        """Load packages meta-information to internal data structures"""
+        if base_url is None:
+            base_url = self.base_url
+        while 1:
+            para = self.__load_one(inf, base_url)
+            if not para: 
+                break
+            
+            if 'architecture' in para and \
+                    para['architecture'] not in [ "all", "any" ] and \
+                    self.allowed_arches and self.allowed_arches != [ "all" ] and \
+                    not [arch for arch in para['architecture'].split() if arch in self.allowed_arches]:
+                continue
+            if para[self.key] not in self:
+                self[para[self.key]] = []
+            self[para[self.key]].append(para)
+
+    def _store(self, ofl):
+        """Write our control data to a file object"""
+        for key in self.keys():
+            for elem in self[key]:
+                elem._store(ofl)
+                ofl.write("\n")
+
+
+class AptRepoClient(LoggableObject):
+    """ Client class to access Apt repositories. """
+    def __init__(self, repos = None, arch = None):
+        """Base class to access APT debian packages meta-data"""
+        if arch:
+            self._arch = arch
+        else:
+            self._arch = ["all"]
+        self.sources = {}
+        self.binaries = {}
+        self.source_to_binaries_map = {}
+        self.pkgid_map = {}
+        self._repos = []
+        if repos:
+            self.__make_repos(repos)
+
+    def load_repos(self, repoline = None, ignore_errors = True, clear = True, local = None):
+        """Loads repositories into internal data structures. Replaces previous content if clear = True (default)"""
+        if clear:
+            self.sources = {}
+            self.binaries = {}
+            self.source_to_binaries_map = {}
+            self.pkgid_map = {}
+        if repoline:
+            self.__make_repos(repoline, clear)    
+
+        self.__load_repos(self._repos, ignore_errors, local)
+
+    # Alias for load_repos(). Just to make commandline apt-get users happy
+    update = load_repos
+
+    def load_local(self, folder, repoline = None, ignore_errors = True, clear = True):
+        """Loads repositories into internal data structures from a local cache. Replaces previous content if clear = True (default)"""
+        self.load_repos(repoline, ignore_errors, clear, folder)
+        
+    # Alias for load_local()
+    update_local = load_local
+
+    def make_source_to_binaries_map(self):
+        """Makes dictionary 'source_to_binaries' out of available packages"""
+        if not self.binaries:
+            # If no binary packages, try to load them
+            self.load_repos()
+        if not self.source_to_binaries_map:
+            # Map not present and needs to be generated
+            for repo in self.binaries:
+                for pkgname in self.binaries[repo].keys():
+                    for pkg in self.binaries[repo][pkgname]:
+                        src = pkg.get_source()
+                        if src not in self.source_to_binaries_map:
+                            self.source_to_binaries_map[src] = []
+                        self.source_to_binaries_map[src].append(pkg)
+
+    def make_pkgid_map(self):
+        """Makes dictionary 'pkgid_map' out of available source/binary packages"""
+        if not self.binaries and not self.sources:
+            # If no packages, try to load them
+            self.load_repos()
+        if not self.pkgid_map:
+            # Map not present and needs to be generated
+            for repo in self.sources:
+                for pkgname in self.sources[repo].keys():
+                    for pkg in self.sources[repo][pkgname]:
+                        pkgid = pkg.get_pkgid()
+                        if pkgid not in self.pkgid_map:
+                            self.pkgid_map[pkgid] = []
+                        self.pkgid_map[pkgid].append(pkg)
+            for repo in self.binaries:
+                for pkgname in self.binaries[repo].keys():
+                    for pkg in self.binaries[repo][pkgname]:
+                        pkgid = pkg.get_pkgid()
+                        if pkgid not in self.pkgid_map:
+                            self.pkgid_map[pkgid] = []
+                        self.pkgid_map[pkgid].append(pkg)
+
+    def get_available_source_repos(self):
+        """Lists known source repositories. Format is [ (base_url, distribution, section), ... ]"""
+        return self.sources.keys()
+
+    def get_available_binary_repos(self):
+        """Lists known binary repositories. Format is [ (base_url, distribution, section), ... ]"""
+        return self.binaries.keys()
+
+    def get_best_binary_version(self, package, base_url = None):
+        """Return exact repository and best available version for binary package"""
+        return self.__get_best_version(package, base_url, self.binaries)
+
+    def get_best_source_version(self, package, base_url = None):
+        """Return exact repository and best available version for source package"""
+        return self.__get_best_version(package, base_url, self.sources)
+
+    def get_binary_name_version(self, package, version = None, base_url = None):
+        """ 
+           Returns list of packages for requested name/version. 
+           If version is not specified, the best version will be chosen
+        """
+        if version is None:
+            return self.__get_pkgs_by_name_version(package, self.get_best_binary_version(package, base_url)[1], base_url, self.binaries)
+        else:
+            return self.__get_pkgs_by_name_version(package, version, base_url, self.binaries)
+
+    def get_source_name_version(self, package, version = None, base_url = None):
+        """ 
+           Returns list of packages for requested name/version. 
+           If version is not specified, the best version will be chosen
+        """
+        if version is None:
+            return self.__get_pkgs_by_name_version(package, self.get_best_source_version(package, base_url)[1], base_url, self.sources)
+        else:
+            return self.__get_pkgs_by_name_version(package, version, base_url, self.sources)
+
+    def get_available_binary_versions(self, package, base_url = None):
+        return _get_available_versions(package, base_url, self.binaries)
+
+    def get_available_source_versions(self, package, base_url = None):
+        return _get_available_versions(package, base_url, self.sources)
+
+    def get_available_sources(self, base_url = None):
+        return _get_available_pkgs(base_url, self.sources)
+
+    def get_available_binaries(self, base_url = None):
+        return _get_available_pkgs(base_url, self.binaries)
+
+    def __get_best_version(self, package, base_url, pkgcache):
+        """
+            Should return a tuple (base_url, package_version) with the best version found in cache.
+            If base_url is not specified, all repositories will be checked
+        """
+        cache_keys = _filter_base_urls(base_url, pkgcache)
+
+        # Go through all base_url keys
+        best = None
+        best_base_url = None
+        for cache_key in cache_keys:
+            cache = pkgcache.get(cache_key, {})
+            if package in cache:
+                match = self.__pkg_best_match(cache[package])
+                if match:
+                    if not best:
+                        best = match
+                        # We're safe. this should not be assigned
+                        best_base_url = cache_key
+                    else:
+                        if DpkgVersion(match) > DpkgVersion(best):
+                            best = match
+                            best_base_url = cache_key
+        if best is None:
+            return (None, None)
+        else:
+            return (best_base_url, str(best))
+
+    def __get_pkgs_by_name_version(self, package, version, base_url, pkgcache):
+        """
+           Should return an array of packages, matched by name/version, from one or more base_urls
+        """
+        cache_keys = _filter_base_urls(base_url, pkgcache)
+        
+        if version is not None and not isinstance(version, DpkgVersion):
+            try:
+                version = DpkgVersion(version)
+            except VersionError:
+                # Bad input data. Return empty set
+                self._logger.info("BadVersion: %s" % version)
+                return []
+
+        # Go through all base_url keys
+        pkgs = []
+        for cache_key in cache_keys:
+            cache = pkgcache.get(cache_key, {})
+            if package in cache:
+                for pkg in cache[package]:
+                    try:
+                        if version is not None and DpkgVersion(pkg['version']) == version:
+                            pkgs.append(pkg)
+                    except VersionError:
+                        # Package with bad version in repository. Let's skip it
+                        self._logger.info("BadVersion: %s %s" % (package, pkg['version']))
+                        continue
+        return pkgs
+
+    def __pkg_best_match(self, cache):
+        """ Looks for best version available """
+        if len(cache) == 0:
+            # WTF!?
+            return None
+        try:
+            best = DpkgVersion(cache[0]['version'])
+        except VersionError:
+            self._logger.info("BadVersion: %s %s" % (cache[0]['package'], cache[0]['version']))
+            return None
+        if len(cache) > 1:
+            for pkg in cache:
+                try:
+                    pkg_ver = DpkgVersion(pkg['version'])
+                    if pkg_ver > best:
+                        best = pkg_ver
+                except VersionError:
+                    self._logger.info("BadVersion: %s %s" % (pkg['package'], pkg['version']))
+                    continue
+        return best 
+
+    def __make_repos(self, repos = None, clear = True):
+        """ Update available repositories array """
+        def filter_repolines(repolines):
+            """Return filtered list of repos after removing comments and whitespace"""
+            def filter_repoline(repoline):
+                """ Get rid of all comments and whitespace."""
+                # Replace "copy:" method to "file:"
+                repoline = re.sub("(\s)copy:", "\\1file:", repoline)
+                # Strip comments and spaces
+                repos = repoline.split("#")[0].strip()
+                return (repos and [repos] or [None])[0]
+            temp = []
+            for line in repolines:
+                repoline = filter_repoline(line)
+                if repoline and repoline not in temp:
+                    temp.append(repoline)
+            return temp
+        if clear:
+            self._repos = []
+        if isinstance(repos, (types.ListType, types.TupleType)):
+            self._repos += [repo for repo in filter_repolines(repos) if repo not in self._repos]
+        elif isinstance(repos, types.StringType):
+            self._repos += [repo for repo in filter_repolines(repos.splitlines()) if repo not in self._repos]
+
+
+    def __load_repos(self, repos, ignore_errors = True, folder = None):
+        """Should load data from remote repository. Format the same as sources.list"""
+        to_load = []
+        for repo in repos:
+            (base_url, url_srcs, url_bins) = self.__make_urls(repo)
+            if url_srcs:
+                repourls = url_srcs 
+                dest_dict = self.sources
+            elif url_bins:
+                repourls = url_bins
+                dest_dict = self.binaries
+            else:
+                # Something wrong ?
+                raise AptRepoException("WTF?!")
+
+            for (url, distro, section) in repourls:
+                if (base_url, distro, section) not in dest_dict:
+                    dest_dict[(base_url, distro, section)] = AptRepoMetadataBase(base_url, allowed_arches = self._arch)
+                dest = dest_dict[(base_url, distro, section)]
+                to_load.append((base_url, url, dest, ignore_errors, folder))
+
+        stt = time.time()
+        for args in to_load:
+            self.__parse_one_repo(*args)
+        self._logger.debug("Parsing time: %f", time.time()-stt)
+
+
+    def __parse_one_repo(self, base_url, url, dest, ignore_errors, folder):
+        """Loads one repository meta-data from URL and parses it to dest"""
+        # Let's check .gz variant first
+        try:
+            if folder:
+                # Local
+                path = os.path.abspath(os.path.join(folder, url.split("//")[1].replace("/", "_")))
+                if os.path.exists(path+".gz"):
+                    self._logger.debug("Fetching file: %s.gz" % path)
+                    fls = open(path+".gz", "rb")
+                elif os.path.exists(path):
+                    self._logger.debug("Fetching file: %s" % path)
+                    fls = open(path, "rb")
+                else:
+                    self._logger.debug("%s not found. Skipping." % path)
+                    return
+            else:
+                # Remote
+                self._logger.debug("Fetching URL: %s.gz" % url)
+                fls = _universal_urlopen(url+".gz")
+        except urllib2.HTTPError, hte:
+            if hte.code == 404:
+                # If no Packages/Sources.gz found, let's try just Packages/Sources
+                try:
+                    self._logger.debug("Compressed metadata not found. Fetching URL: %s" % url)
+                    fls = _universal_urlopen(url)
+                except urllib2.HTTPError, hte:
+                    if hte.code == 404:
+                        if ignore_errors:
+                            return
+                        else:
+                            raise
+                    else:
+                        raise
+            else:
+                raise
+        dest.load(fls, base_url)
+        # Close socket after use
+        fls.close()
+        del fls
+
+
+    def __make_urls(self, repoline):
+        """The same as above, but only for one line"""
+        match = re.match(r"(?P<repo_type>deb|deb-src)\s+(?P<base_url>[\S]+?)/?\s+((?P<simple_repo>[\S]*?/)|(?P<repo>\S*?[^/\s])(?:\s+(?P<sections>[^/]+?)))\s*$", repoline)
+        if not match:
+            raise AptRepoException("Unable to parse: %s" % repoline)
+        url_bins = []
+        url_srcs = []
+        repo_type = match.group("repo_type")
+        if match.group("simple_repo"):
+            if repo_type == "deb":
+                __path = os.path.normpath(os.path.join("./" + match.group("simple_repo"), "Packages"))
+                url_bins = [ ("/".join([match.group("base_url"), __path]), match.group("simple_repo"), '') ]
+            elif repo_type == "deb-src":
+                __path = os.path.normpath(os.path.join("./" + match.group("simple_repo"), "Sources"))
+                url_srcs = [ ("/".join([match.group("base_url"), __path]), match.group("simple_repo"), '' ) ]
+            else:
+                raise AptRepoException("Unknown repository type: %s" % repo_type)
+        else:
+            if repo_type == "deb":
+                for item in re.split("\s+", match.group("sections")):
+                    for arch in self._arch:
+                        url_bins.append( ("/".join([match.group("base_url"), "dists", match.group("repo"), item, "binary-%s/Packages" % arch]), match.group("repo"), item))
+            elif repo_type == "deb-src":
+                for item in match.group("sections").split():
+                    url_srcs.append( ("/".join([match.group("base_url"), "dists", match.group("repo"), item, "source/Sources"]), match.group("repo"), item))
+            else:
+                raise AptRepoException("Unknown repository type: %s" % repo_type)
+        return (match.group("base_url"), url_srcs, url_bins)
+
+
+if __name__ == "__main__":
+    raise NotImplementedError

=== added file 'minideblib/ChangeFile.py'
--- minideblib/ChangeFile.py	1970-01-01 00:00:00 +0000
+++ minideblib/ChangeFile.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,129 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+# $Id$
+
+# ChangeFile
+
+# A class which represents a Debian change file.
+
+# Copyright 2002 Colin Walters <walters@xxxxxxx>
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+__revision__ = "r"+"$Revision$"[11:-2]
+__all__ = [ 'ChangeFile', 'ChangeFileException' ]
+
+import os, re, string, stat, popen2
+
+from minideblib import DpkgControl
+from minideblib import SignedFile
+from minideblib.LoggableObject import LoggableObject
+
+class ChangeFileException(Exception):
+    """Exception generated in error situations"""
+    def __init__(self, value):
+        Exception.__init__(self)
+        self._value = value
+    def __repr__(self):
+        return `self._value`
+    def __str__(self):
+        return `self._value`
+        
+class ChangeFile(DpkgControl.DpkgParagraph, LoggableObject):
+    def __init__(self): 
+        DpkgControl.DpkgParagraph.__init__(self)
+        self.dsc = False
+        
+    def load_from_file(self, filename):
+        if filename[-4:] == '.dsc':
+            self.dsc = True
+        fhdl = SignedFile.SignedFile(open(filename))
+        self.load(fhdl)
+        fhdl.close()
+
+    def getFiles(self):
+        out = []
+        try:
+            files = self['files']
+        except KeyError:
+            return []
+        if self.dsc:
+            lineregexp = re.compile("^([0-9a-f]{32})[ \t]+(\d+)[ \t]+([0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$")
+        else:
+            lineregexp = re.compile("^([0-9a-f]{32})[ \t]+(\d+)[ \t]+([-/a-zA-Z0-9]+)[ \t]+([-a-zA-Z0-9]+)[ \t]+([0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$")
+        for line in files:
+            if line == '':
+                continue
+            match = lineregexp.match(line)
+            if (match is None):
+                raise ChangeFileException("Couldn't parse file entry \"%s\" in Files field of .changes" % (line,))
+            if self.dsc:
+                out.append((match.group(1), match.group(2), "", "", match.group(3)))
+            else:
+                out.append((match.group(1), match.group(2), match.group(3), match.group(4), match.group(5)))
+        return out
+        
+    def verify(self, sourcedir):
+        for (md5sum, size, section, priority, filename) in self.getFiles():
+            self._verify_file_integrity(os.path.join(sourcedir, filename), int(size), md5sum)
+            
+    def _verify_file_integrity(self, filename, expected_size, expected_md5sum):
+        self._logger.debug('Checking integrity of %s' % (filename,))
+        try:
+            statbuf = os.stat(filename)
+            if not stat.S_ISREG(statbuf[stat.ST_MODE]):
+                raise ChangeFileException("%s is not a regular file" % (filename,))
+            size = statbuf[stat.ST_SIZE]
+        except OSError, excp:
+            raise ChangeFileException("Can't stat %s: %s" % (filename, excp.strerror))
+        if size != expected_size:
+            raise ChangeFileException("File size for %s does not match that specified in .dsc" % (filename,))
+        if (self._get_file_md5sum(filename) != expected_md5sum):
+            raise ChangeFileException("md5sum for %s does not match that specified in .dsc" % (filename,))
+        self._logger.debug('Verified md5sum %s and size %s for %s' % (expected_md5sum, expected_size, filename))
+
+    def _get_file_md5sum(self, filename):
+        if os.access('/usr/bin/md5sum', os.X_OK):
+            cmd = '/usr/bin/md5sum %s' % (filename,)
+            self._logger.debug("Running: %s" % (cmd,))
+            child = popen2.Popen3(cmd, 1)
+            child.tochild.close()
+            erroutput = child.childerr.read()
+            child.childerr.close()
+            if erroutput != '':
+                child.fromchild.close()
+                raise ChangeFileException("md5sum returned error output \"%s\"" % (erroutput,))
+            (md5sum, filename) = string.split(child.fromchild.read(), None, 1)
+            child.fromchild.close()
+            status = child.wait()
+            if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
+                if os.WIFEXITED(status):
+                    msg = "md5sum exited with error code %d" % (os.WEXITSTATUS(status),)
+                elif os.WIFSTOPPED(status):
+                    msg = "md5sum stopped unexpectedly with signal %d" % (os.WSTOPSIG(status),)
+                elif os.WIFSIGNALED(status):
+                    msg = "md5sum died with signal %d" % (os.WTERMSIG(status),)
+                raise ChangeFileException(msg)
+            return md5sum.strip()
+        import md5
+        fhdl = open(filename)
+        md5sum = md5.new()
+        buf = fhdl.read(8192)
+        while buf != '':
+            md5sum.update(buf)
+            buf = fhdl.read(8192)
+        fhdl.close()
+        return md5sum.hexdigest()

=== added file 'minideblib/DebianSigVerifier.py'
--- minideblib/DebianSigVerifier.py	1970-01-01 00:00:00 +0000
+++ minideblib/DebianSigVerifier.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,37 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+#
+# DebianSigVerifier
+
+# A class for verifying signed files, using Debian keys
+
+# Copyright © 2002 Colin Walters <walters@xxxxxxx>
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os
+from minideblib.GPGSigVerifier import GPGSigVerifier
+
+class DebianSigVerifier(GPGSigVerifier):
+    _dpkg_ring = '/etc/dpkg/local-keyring.gpg'
+    def __init__(self, keyrings=None, extra_keyrings=None):
+        if keyrings is None:
+            keyrings = ['/usr/share/keyrings/debian-keyring.gpg', '/usr/share/keyrings/debian-keyring.pgp']
+        if os.access(self._dpkg_ring, os.R_OK):
+            keyrings.append(self._dpkg_ring)
+        if not extra_keyrings is None:
+            keyrings += extra_keyrings
+        GPGSigVerifier.__init__(self, keyrings)

=== added file 'minideblib/DpkgChangelog.py'
--- minideblib/DpkgChangelog.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgChangelog.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,270 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+#
+# DpkgChangelog.py
+#
+# This module implements parser for Debian changelog files
+#
+# Copyright (C) 2005,2006 Alexandr Kanevskiy
+#
+# Contact: Alexandr Kanevskiy <packages@xxxxxxxx>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+#
+# $Id$
+
+import re
+from minideblib import DpkgVersion
+import rfc822
+
+__revision__ = "r"+"$Revision$"[11:-2]
+__all__ = ['DpkgChangelog', 'DpkgChangelogEntry', 'DpkgChangelogException']
+
+
+class DpkgChangelogException(Exception):
+    def __init__(self, msg, lineno = 0):
+        Exception.__init__(self)
+        self.msg = msg
+        self.lineno = lineno
+    def __str__(self):
+        return self.msg + (self.lineno and " at line %d" % self.lineno or "")
+    def __repr__(self):
+        return self.msg + (self.lineno and " at line %d" % self.lineno or "")
+
+# Fixed settings, do not change these unless you really know what you are doing
+PackageRegex    = "[a-z0-9][a-z0-9.+-]+"        # Regular expression package names must comply with
+VersionRegex    = "(?:[0-9]+:)?[a-zA-Z0-9~.+-]+" # Regular expression package versions must comply with
+
+# Regular expressions for various bits of the syntax used
+ClosesRegex     = "closes:\s*(?:bug)?#?\s?\d+(?:,\s?(?:bug)?#?\s?\d+)*"
+NBugRegex       = "Fixe[sd]:\s*NB#\d+(?:\s*,\s*NB#\d+)*"
+MBugRegex       = "Fixe[sd]:\s*MB#\d+(?:\s*,\s*MB#\d+)*"
+BugRegex        = "(\d+)"
+NMBugRegex      = "B#(\d+)"
+NReqImplRegex   = "Implemented:\s*NR#\d{1,6}(?:,\s*NR#\d{1,6})*"
+NReqUpdRegex    = "Updated:\s*NR#\d{1,6}(?:,\s*NR#\d{1,6})*"
+NReqPartRegex   = "Partial:\s*NR#\d{1,6}(?:,\s*NR#\d{1,6})*"
+NReqDropRegex   = "Dropped:\s*NR#\d{1,6}(?:,\s*NR#\d{1,6})*"
+NReqRegex       = "R#(\d{1,6})"
+
+# Precompile the regular expressions
+ClosesMatcher    = re.compile(ClosesRegex, re.IGNORECASE)
+BugMatcher       = re.compile(BugRegex)
+NMBugMatcher     = re.compile(NMBugRegex)
+NBugMatcher      = re.compile(NBugRegex, re.IGNORECASE)
+MBugMatcher      = re.compile(MBugRegex, re.IGNORECASE)
+NReqImplMatcher  = re.compile(NReqImplRegex, re.IGNORECASE)
+NReqUpdMatcher   = re.compile(NReqUpdRegex, re.IGNORECASE)
+NReqPartMatcher  = re.compile(NReqPartRegex, re.IGNORECASE)
+NReqDropMatcher  = re.compile(NReqDropRegex, re.IGNORECASE)
+NReqMatcher      = re.compile(NReqRegex)
+
+
+# Changelog regexps
+StartRegex      = "(?P<package>%s) \((?P<version>%s)\) (?P<distribution>[\w-]+(?:\s+[\w-]+)*); (?P<attrs>.*)" % (PackageRegex, VersionRegex)
+EndRegex        = " -- (?P<changedby>.+? <.+?>)  (?P<date>.*)"
+AttrRegex       = "(?P<key>.+?)=(?P<value>.*)"
+
+# Precompile the regular expressions
+StartMatcher    = re.compile(StartRegex)
+EndMatcher      = re.compile(EndRegex)
+AttrMatcher     = re.compile(AttrRegex)
+
+
+class DpkgChangelogEntry:
+    '''Simple class to represent a single changelog entry. The list of
+    attributes in the entry header is stored in the attributes map. The
+    timestamp associated with the changes is stored in time.mktime()
+    compatible tuple format in the date member.'''
+
+    def __init__(self):
+        self.package = ""
+        self.version = ""
+        self.distribution = None
+        self.date = None
+        self.strdate = ""
+        self.changedby = ""
+        self.bugsfixed = []
+        self.nbugsfixed = []
+        self.mbugsfixed = []
+        self.nreqsimplemented = []
+        self.nreqsupdated = []
+        self.nreqspartial = []
+        self.nreqsdropped = []
+        self.attributes = {}
+        self.entries = []
+
+
+    def add_entry(self, entry):
+        '''Utility function to add a changelog entry. Also takes care
+        of extracting the bugs closed by this change and adding them to
+        the self.bugsfixed array.'''
+
+        # Check if we have a proper Closes command
+        match = ClosesMatcher.search(entry)
+        if match:
+            self.bugsfixed.extend(BugMatcher.findall(match.group(0)))
+        # Check if we have a proper NBugs
+        match = NBugMatcher.search(entry)
+        if match:
+            self.nbugsfixed.extend(NMBugMatcher.findall(match.group(0)))
+        # Check if we have a proper MBugs
+        match = MBugMatcher.search(entry)
+        if match:
+            self.mbugsfixed.extend(NMBugMatcher.findall(match.group(0)))
+        # Check if we have implemented requirements
+        match = NReqImplMatcher.search(entry)
+        if match:
+            self.nreqsimplemented.extend(NReqMatcher.findall(match.group(0)))
+        # Check if we have updated requirements
+        match = NReqUpdMatcher.search(entry)
+        if match:
+            self.nreqsupdated.extend(NReqMatcher.findall(match.group(0)))
+        # Check if we have partially implemented requirements
+        match = NReqPartMatcher.search(entry)
+        if match:
+            self.nreqspartial.extend(NReqMatcher.findall(match.group(0)))
+        # Check if we have dropped requirements
+        match = NReqDropMatcher.search(entry)
+        if match:
+            self.nreqsdropped.extend(NReqMatcher.findall(match.group(0)))
+        self.entries.append(entry)
+
+
+class DpkgChangelog:
+    '''Simple class to represent a Debian changelog'''
+    def __init__(self):
+        self.entries = []
+        self.lineno = 0
+        self.package = None
+        self.version = None
+        self.distribution = None
+        self.changedby = None
+ 
+
+    def __get_next_nonempty_line(self, infile):
+        "Return the next line that is not empty"
+        self.lineno += 1
+        line = infile.readline()
+        while not line.strip():
+            self.lineno += 1
+            line = infile.readline()
+            if line == '':
+                return ''
+        if line[-1] == "\n":
+            return line[:-1]
+        else:
+            return line
+
+
+    def _parse_one_entry(self, infile):
+
+        line = self.__get_next_nonempty_line(infile)
+        match = StartMatcher.match(line)
+        if not match:
+            raise DpkgChangelogException("Invalid first line", self.lineno)
+
+        entry = DpkgChangelogEntry()
+        entry.package = match.group("package")
+        try:
+            entry.version = DpkgVersion.DpkgVersion(match.group("version"))
+        except Exception, e:
+            raise DpkgChangelogException("Invalid version: %s" % e, self.lineno)
+
+        entry.distribution = match.group("distribution").split()
+
+        # Extract the attributes from the line
+        for attr in match.group("attrs").split():
+            am = AttrMatcher.match(attr)
+            if not am:
+                raise DpkgChangelogException("Invalid syntax for attribute", self.lineno)
+            entry.attributes[am.group("key")] = am.group("value")
+
+        # Check for essential urgency attribute
+        if not entry.attributes.has_key("urgency"):
+            raise DpkgChangelogException("Missing urgency attribute", self.lineno)
+
+        # Read the changelog entries themselves
+        line = self.__get_next_nonempty_line(infile)
+        buf = ""
+        while line.startswith("  "):
+            if line.startswith("  *"):
+                if buf:
+                    entry.add_entry(buf.strip())
+                buf = line[2:]
+            else:
+                buf += "\n" + line[2:]
+            line = self.__get_next_nonempty_line(infile)
+
+        # Commit last seen line
+        if buf:
+            entry.add_entry(buf.strip())
+
+        # Try and parse the last line
+        em = EndMatcher.match(line)
+        if not em:
+            raise DpkgChangelogException("Invalid line in changelog entry", self.lineno)
+
+        entry.changedby = em.group("changedby")
+        try:
+            entry.strdate = em.group("date")
+            entry.date = rfc822.parsedate(entry.strdate)
+            if not entry.date:
+                raise DpkgChangelogException("Invalid date in changelog entry: %s" % entry.strdate, self.lineno)
+        except:
+            raise DpkgChangelogException("Invalid date in changelog entry: %s" % entry.strdate, self.lineno)
+
+        # Return the parsed changelog entry
+        return entry
+
+
+    def parse_changelog(self, changelog, since_ver = None):
+        '''Parses changelog argument (could be file or string)
+        and represents its content as an array of DpkgChangelogEntry'''
+        if isinstance(changelog, basestring):
+            import StringIO
+            fh = StringIO.StringIO(changelog)
+        elif hasattr(changelog, "readline") and callable(changelog.readline):
+            fh = changelog
+        else: 
+            raise DpkgChangelogException("Invalid argument type")
+
+        pkg_name = None
+
+        while True:
+            try:
+                entry = self._parse_one_entry(fh)
+                if since_ver:
+                    if not pkg_name:
+                        pkg_name = entry.package
+                    if pkg_name != entry.package or entry.version <= since_ver:
+                        # if changelog contains entries for different source 
+                        # package name or we already parsed version till which
+                        # we asked to parse -> stop.
+                        break
+                self.entries.append(entry)
+            except DpkgChangelogException, ex:
+                last_err = ex.msg
+                break
+
+        if len(self.entries) > 0:
+            self.package = self.entries[0].package
+            self.version = self.entries[0].version
+            self.distribution = self.entries[0].distribution
+            self.changedby = self.entries[0].changedby
+        else:
+            raise DpkgChangelogException("Unable to get entries from changelog: %s" % last_err, self.lineno)

=== added file 'minideblib/DpkgControl.py'
--- minideblib/DpkgControl.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgControl.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,159 @@
+# DpkgControl.py
+#
+# This module implements control file parsing.
+#
+# DpkgParagraph is a low-level class, that reads/parses a single paragraph
+# from a file object.
+#
+# DpkgControl uses DpkgParagraph in a loop, pulling out the value of a
+# defined key (package), and using that as a key in its internal
+# dictionary.
+#
+# DpkgSourceControl grabs the first paragraph from the file object, stores
+# it in object.source, then passes control to DpkgControl.load, to parse
+# the rest of the file.
+#
+# To test this, pass it a filetype char, a filename, then, optionally,
+# the key to a paragraph to display, and if a fourth arg is given, only
+# show that field.
+#
+# Copyright 2001 Adam Heath <doogie@xxxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
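+# Example usage (an illustrative sketch, not part of the original module; it
+# assumes a debian/control file is available on disk):
+#
+#     from minideblib.DpkgControl import DpkgSourceControl
+#     ctl = DpkgSourceControl()
+#     ctl.load(open("debian/control"))
+#     print ctl.source["source"]              # first paragraph; keys are lowercased
+#     for pkg in ctl.keys():                  # remaining paragraphs, keyed by "package"
+#         print pkg, ctl[pkg].get("architecture", "")
+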
+import re, string
+from minideblib.DpkgDatalist import *
+from minideblib.SignedFile import *
+from types import ListType
+
+class DpkgParagraph(DpkgOrderedDatalist):
+    caseSensitive = 0
+    trueFieldCasing = {}
+
+    def setCaseSensitive( self, value ):    self.caseSensitive = value
+
+    def load( self, f ):
+        "Paragraph data from a file object."
+        key = None
+        value = None
+        while 1:
+            line = f.readline()
+            if not line: 
+                return
+            # skip blank lines until we reach a paragraph
+            if line == '\n':
+                if not self:
+                    continue
+                else:
+                    return
+            if line[ 0 ] == '#':
+                # Skip comment lines
+                continue
+            line = line[ :-1 ]
+            if line[ 0 ] not in [' ', '\t']:
+                key, value = string.split( line, ":", 1 )
+                if value: value = value[ 1: ]
+                if not self.caseSensitive:
+                    newkey = string.lower( key )
+                    if not self.trueFieldCasing.has_key( newkey ):
+                        self.trueFieldCasing[ newkey ] = key
+                    key = newkey
+            else:
+                if isinstance( value, ListType ):
+                    value.append( line[ 1: ] )
+                else:
+                    value = [ value, line[ 1: ] ]
+            self[ key ] = value
+
+    def _storeField( self, f, value, lead = " " ):
+        if isinstance( value, ListType ):
+            value = string.join( map( lambda v, lead = lead: v and ( lead + v ) or v, value ), "\n" )
+        else:
+            if value: value = lead + value
+        f.write( "%s\n" % ( value ) )
+
+    def _store( self, f ):
+        "Write our paragraph data to a file object"
+        for key in self.keys():
+            value = self[ key ]
+            if self.trueFieldCasing.has_key( key ):
+                key = self.trueFieldCasing[ key ]
+            f.write( "%s:" % key )
+            self._storeField( f, value )
+
+class DpkgControl(DpkgOrderedDatalist):
+
+    key = "package"
+    caseSensitive = 0
+
+    def setkey( self, key ):        self.key = key
+    def setCaseSensitive( self, value ):    self.caseSensitive = value
+
+    def _load_one( self, f ):
+        p = DpkgParagraph( None )
+        p.setCaseSensitive( self.caseSensitive )
+        p.load( f )
+        return p
+
+    def load( self, f ):
+        while 1:
+            p = self._load_one( f )
+            if not p: break
+            self[ p[ self.key ] ] = p
+
+    def _store( self, f ):
+        "Write our control data to a file object"
+
+        for key in self.keys():
+            self[ key ]._store( f )
+            f.write( "\n" )
+
+class DpkgSourceControl( DpkgControl ):
+    source = None
+
+    def load( self, f ):
+        f = SignedFile(f)
+        self.source = self._load_one( f )
+        DpkgControl.load( self, f )
+    
+    def __repr__( self ):
+        return self.source.__repr__() + "\n" + DpkgControl.__repr__( self )
+    
+    def _store( self, f ):
+        "Write our control data to a file object"
+        self.source._store( f )
+        f.write( "\n" )
+        DpkgControl._store( self, f )
+
+if __name__ == "__main__":
+    import sys
+    types = { 'p' : DpkgParagraph, 'c' : DpkgControl, 's' : DpkgSourceControl }
+    type = sys.argv[ 1 ]
+    if not types.has_key( type ):
+        print "Unknown type `%s'!" % type
+        sys.exit( 1 )
+    file = open( sys.argv[ 2 ], "r" )
+    data = types[ type ]()
+    data.load( file )
+    if len( sys.argv ) > 3:
+        para = data[ sys.argv[ 3 ] ]
+        if len( sys.argv ) > 4:
+            para._storeField( sys.stdout, para[ sys.argv[ 4 ] ], "" )
+        else:
+            para._store( sys.stdout )
+    else:
+        data._store( sys.stdout )
+
+# vim:ts=4:sw=4:et:

=== added file 'minideblib/DpkgDatalist.py'
--- minideblib/DpkgDatalist.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgDatalist.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,81 @@
+# DpkgDatalist.py
+#
+# This module implements DpkgDatalist, an abstract class for storing 
+# a list of objects in a file. Children of this class have to implement
+# the load and _store methods.
+#
+# Copyright 2001 Wichert Akkerman <wichert@xxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
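+# Minimal subclass sketch (illustrative only, not part of the original file):
+# concrete children supply load() and _store(), and inherit store(), which
+# writes through SafeWriteFile.
+#
+#     class KeyValueList(DpkgDatalist):
+#         def load(self, fn):
+#             for line in open(fn):
+#                 key, value = line.rstrip("\n").split("=", 1)
+#                 self[key] = value
+#         def _store(self, f):
+#             for key in self.keys():
+#                 f.write("%s=%s\n" % (key, self[key]))
+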
+import os, sys
+from UserDict import UserDict
+from OrderedDict import OrderedDict
+from SafeWriteFile import SafeWriteFile
+from types import StringType
+
+class DpkgDatalistException(Exception):
+    UNKNOWN     = 0
+    SYNTAXERROR = 1
+
+    def __init__(self, message="", reason=UNKNOWN, file=None, line=None):
+        self.message=message
+        self.reason=reason
+        self.filename=file
+        self.line=line
+
+class _DpkgDatalist:
+    def __init__(self, fn=""):
+        '''Initialize a DpkgDatalist object. An optional argument is a
+        file from which we load values.'''
+
+        self.filename=fn
+        if self.filename:
+            self.load(self.filename)
+
+    def store(self, fn=None):
+        "Store variable data in a file."
+
+        if fn==None:
+            fn=self.filename
+        # Special case for writing to stdout
+        if not fn:
+            self._store(sys.stdout)
+            return
+
+        # Write to a temporary file first
+        if type(fn) == StringType:
+            vf=SafeWriteFile(fn+".new", fn, "w")
+        else:
+            vf=fn
+        try:
+            self._store(vf)
+        finally:
+            if type(fn) == StringType:
+                vf.close()
+
+
+class DpkgDatalist(UserDict, _DpkgDatalist):
+    def __init__(self, fn=""):
+        UserDict.__init__(self)
+        _DpkgDatalist.__init__(self, fn)
+
+
+class DpkgOrderedDatalist(OrderedDict, _DpkgDatalist):
+    def __init__(self, fn=""):
+        OrderedDict.__init__(self)
+        _DpkgDatalist.__init__(self, fn)
+
+# vim:ts=4:sw=4:et:

=== added file 'minideblib/DpkgDebPackage.py'
--- minideblib/DpkgDebPackage.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgDebPackage.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,278 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+#
+# DpkgDebPackage.py
+#
+# This module implements class for accessing .deb packages
+#
+# Copyright (C) 2005,2006 Alexandr Kanevskiy
+#
+# Contact: Alexandr Kanevskiy <packages@xxxxxxxx>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+#
+# $Id$
+
+__revision__ = "r"+"$Revision$"[11:-2]
+__all__ = [ 'DpkgDebPackage', 'DpkgDebPackageException' ]
+
+import os
+import re
+import glob
+import shutil
+import gzip 
+import tempfile
+import commands
+import errno
+
+from minideblib.DpkgControl import DpkgParagraph
+from minideblib.DpkgVersion import DpkgVersion
+from minideblib.LoggableObject import LoggableObject
+
+class DpkgDebPackageException(Exception):
+    """General exception which could be raised by DpkgDebPackage"""
+    def __init__(self, msg):
+        Exception.__init__(self)
+        self.msg = msg
+    def __str__(self):
+        return self.msg
+    def __repr__(self):
+        return self.msg
+
+
+class DpkgDebPackage(LoggableObject):
+    """This class represent complete information about Debian binary package"""
+
+    def __init__(self, pkgfile=None):
+        """ path -- path to .deb package """
+        self.control = DpkgParagraph()
+        self.md5sums = None
+        self.__md5files = None
+        self.changes = None
+        self.news = None
+        self.files = None
+        self.__raw_files = None
+        self.path = None
+        if pkgfile:
+            self.path = os.path.abspath(pkgfile)
+            if not os.path.isfile(self.path):
+                raise DpkgDebPackageException("Unable to locate file: %s" % self.path)
+            self.load_control()
+
+
+    def load(self, path = None, getfiles=True, getchanges='both'):
+        """ Loads .deb file for processing """
+        path_changed = False
+        if not path and not self.path:
+            raise DpkgDebPackageException("No deb file specified")
+        if path:
+            new_path = os.path.abspath(path)
+            if new_path != self.path:
+                self.path = new_path
+                path_changed = True
+        if not os.path.isfile(self.path):
+            raise DpkgDebPackageException("Unable to locate file: %s" % self.path)
+        if path_changed or not self.control:
+            self.load_control()
+        if getchanges:
+            self.load_changes(getchanges)
+        if getfiles:
+            self.load_contents()
+
+    def load_contents(self):
+        """ Reads contents of .deb file into memory """
+        if self.path and os.path.isfile(self.path):
+            self.__raw_files = self.__list_contents()
+            if self.__raw_files:
+                self.files = [ fname[5] for fname in self.__raw_files ]
+        else: 
+            raise DpkgDebPackageException("Unable to locate file: %s" % self.path)
+
+    def load_changes(self, getchanges='both'):
+        """ Reads changelog and/or news information into memory """
+        if self.path and os.path.isfile(self.path):
+            (news, changes) = self.__extract_changes(getchanges)
+            self.news = news
+            self.changes = changes
+        else:
+            raise DpkgDebPackageException("Unable to locate file: %s" % self.path)
+
+    def load_control(self):
+        """ Reads control information into memory """
+        if self.path and os.path.isfile(self.path):
+            tempdir = self.__extract_control()
+            fhdl = open(os.path.join(tempdir,"control"),"r")
+            self.control = DpkgParagraph()
+            self.control.load(fhdl)
+            fhdl.close()
+            if not self.__parse_md5sums(tempdir):
+                self._logger.warning("Can't parse md5sums")
+            else:
+                self.__md5files = [ xsum[1] for xsum in self.md5sums ]
+            shutil.rmtree(tempdir, True)
+        else:
+            raise DpkgDebPackageException("Unable to locate file: %s" % self.path)
+
+    def __extract_changes(self, which, since_version=None):
+        '''Extract changelog entries, news or both from the package.
+        If since_version is specified, only return entries later than the specified version.
+        Returns a (news, changelog) tuple; each element is the matching changelog text or None.'''
+
+        def changelog_variations(filename):
+            """Return list of all possible changelog/news locations"""
+            formats = ['usr/doc/*/%s.gz',
+                       'usr/share/doc/*/%s.gz',
+                       'usr/doc/*/%s',
+                       'usr/share/doc/*/%s',
+                       './usr/doc/*/%s.gz',
+                       './usr/share/doc/*/%s.gz',
+                       './usr/doc/*/%s',
+                       './usr/share/doc/*/%s']
+            return [ format % filename for format in formats ]
+        
+        news_filenames = changelog_variations('NEWS.Debian')
+        changelog_filenames = changelog_variations('changelog.Debian')
+        changelog_filenames_native = changelog_variations('changelog')
+
+        filenames = []
+        if which == 'both' or which == 'news':
+            filenames.extend(news_filenames)
+        if which == 'both' or which == 'changelogs':
+            filenames.extend(changelog_filenames)
+            filenames.extend(changelog_filenames_native)
+
+        tempdir = self.extract_contents(filenames)
+
+        news = None
+        for filename in news_filenames:
+            news = self.__read_changelog(os.path.join(tempdir, filename),
+                                       since_version)
+            if news:
+                break
+
+        changelog = None
+        for batch in (changelog_filenames, changelog_filenames_native):
+            for filename in batch:
+                changelog = self.__read_changelog(os.path.join(tempdir, filename),
+                                                since_version)
+                if changelog:
+                    break
+            if changelog:
+                break
+
+        shutil.rmtree(tempdir, True)
+
+        return (news, changelog)
+
+    def __extract_control(self):
+        """Extracts content of control.tar.gz from .deb package"""
+        try:
+            tempdir = tempfile.mkdtemp(prefix='dpkgdebpackage')
+        except AttributeError:
+            tempdir = tempfile.mktemp()
+            os.mkdir(tempdir)
+
+        extract_command = 'ar p %s control.tar.gz | tar zxf - -C %s 2>/dev/null' % (self.path, tempdir)
+
+        os.system(extract_command)
+
+        return tempdir
+
+    def extract_contents(self, filenames):
+        """Extracts partial contents of Debian package to temporary directory"""
+        try:
+            tempdir = tempfile.mkdtemp(prefix='dpkgdebpackage')
+        except AttributeError:
+            tempdir = tempfile.mktemp()
+            os.mkdir(tempdir)
+
+        extract_command = 'ar p %s data.tar.gz |tar zxf - -C %s %s 2>/dev/null' % (
+            self.path,
+            tempdir,
+            ' '.join( [ "'%s'" % filen for filen in filenames ] )
+            )
+
+        # tar exits unsuccessfully if _any_ of the files we wanted
+        # were not available, so we can't do much with its status
+        os.system(extract_command)
+
+        return tempdir
+    
+    def __parse_md5sums(self, tempdir):
+        """Parses md5sums file from extracted control section of debian package"""
+        path = os.path.join(tempdir,"md5sums")
+        if not os.access(path, os.R_OK):
+            print "Can't open file %s" % path
+            return False
+        self.md5sums = []
+        fhdl = open(path,"r")
+        for line in fhdl.readlines():
+            if line[33] != " ":
+                print "33 is not a space.\n %s" % line
+                # Something bad happened, unknown file format.
+                fhdl.close()
+                return False
+            argl = [ line[:32].strip(), line[34:].strip() ]
+            self.md5sums.append(argl)
+        fhdl.close()
+        return True
+
+    def __list_contents(self):
+        """Returns filelist of data.tar.gz"""
+        (status, output) = commands.getstatusoutput("ar p %s data.tar.gz | tar ztvf -" % self.path)
+        if status != 0:
+            return []
+        files = [ line.split() for line in output.splitlines() ]
+        return files
+
+    def __read_changelog(self, filename, since_version):
+        """Read changelog up to specified version"""
+        changelog_header = re.compile('^\S+ \((?P<version>.*)\) .*;.*urgency=(?P<urgency>\w+).*')
+        filenames = glob.glob(filename)
+
+        fhdl = None
+        for filename in filenames:
+            try:
+                if filename.endswith('.gz'):
+                    fhdl = gzip.GzipFile(filename)
+                else:
+                    fhdl = open(filename)
+                break
+            except IOError, ioerr:
+                if ioerr.errno == errno.ENOENT:
+                    pass
+                else:
+                    raise
+
+        if not fhdl:
+            return None
+
+        changes = ''
+        is_debian_changelog = 0
+        for line in fhdl.readlines():
+            match = changelog_header.match(line)
+            if match:
+                is_debian_changelog = 1
+                if since_version:
+                    if DpkgVersion(match.group('version')) <= since_version:
+                        break
+            changes += line
+
+        fhdl.close()
+        if not is_debian_changelog:
+            return None
+
+        return changes
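+
+# Illustrative usage (a sketch, not from upstream; "foo_1.0-1_i386.deb" is a
+# hypothetical package path):
+#
+#     pkg = DpkgDebPackage("foo_1.0-1_i386.deb")     # control data loaded here
+#     print pkg.control["package"], pkg.control["version"]
+#     pkg.load(getfiles=True, getchanges='both')     # file list + changelog/news
+#     print len(pkg.files or []), "files in data.tar.gz"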

=== added file 'minideblib/DpkgUtils.py'
--- minideblib/DpkgUtils.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgUtils.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,135 @@
+# DpkgUtils.py
+#
+# This module contains a set of utility functions that are used
+# throughout the dpkg suite.
+#
+# Copyright 2002 Wichert Akkerman <wichert@xxxxxxxxxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import os, re, string, sys
+import DpkgOptions
+
+# Message levels as used by inform()
+VERB_QUIET		= 0	# Quiet operation (default)
+VERB_INFORMATIVE	= 1	# Informative messages
+VERB_DETAIL		= 2	# Detailed information on what we're doing
+VERB_DEBUG		= 3	# Debug information
+
+
+def inform(msg, level=VERB_INFORMATIVE):
+	"Print an informative message if the verbose-level is high enough."
+
+	if DpkgOptions.Options["verbose"]>=level:
+		print msg
+
+def abort(str):
+	"Print a message and exit with an error."
+	sys.stderr.write(str + "\n")
+	sys.exit(1)
+
+
+def SlurpFile(file, sep='\n'):
+	"Read the contents of a file."
+
+	fd=open(file, 'r')
+	return string.split(fd.read(), sep)
+
+
+def SlurpCommand(command, sep='\n'):
+	"Run a command and return its output."
+
+	fd=os.popen(command)
+	data=fd.read()
+	if data=='':
+		return ()
+	else:
+		return string.split(data, sep)
+
+
+def __FilterData(data, regexp):
+	"Filter the data through a regexp and return the matching groups."
+
+	lst=[]
+	matcher=re.compile(regexp)
+	for d in data:
+		mo=matcher.search(d)
+		if mo:
+			lst.append(mo.groups())
+	
+	return lst
+
+
+def FilterFile(file, regexp, sep='\n'):
+	"Read a file return the regexp matches."
+
+	return __FilterData(SlurpFile(file, sep), regexp)
+
+
+def FilterCommand(command, regexp, sep='\n'):
+	"Run a command and return the regexp matches."
+
+	return __FilterData(SlurpCommand(command, sep), regexp)
+
+
+def ValidPackageName(name):
+	"Check if a package name is valid"
+
+	if re.match("^%s$" % DpkgOptions.PackageRegex, name):
+		return 1
+	return 0
+
+
+def ValidPackageVersion(version):
+	"Check if a package version is valid"
+
+	if re.match("^%s$" % DpkgOptions.VersionRegex, version):
+		return 1
+	return 0
+
+
+def HandleArgOption(keyword, sopt, lopt, opt, args):
+	'''Utility function for argument parsers. Check for a specific
+	option-taking argument and process it.'''
+	if opt==sopt:
+		DpkgOptions.Options[keyword]=args.pop(0)
+		return 1
+	elif opt[:2]==sopt:
+		DpkgOptions.Options[keyword]=opt[2:]
+		return 1
+	elif opt==lopt:
+		DpkgOptions.Options[keyword]=args.pop(0)
+		return 1
+	elif lopt and opt[:len(lopt)]==lopt and opt[len(lopt)]=='=':
+		DpkgOptions.Options[keyword]=opt[len(lopt)+1:]
+		return 1
+
+	return 0
+
+
+def HandleNoArgOption(keyword, sopt, lopt, opt):
+	'''Utility function for argument parsers. Check for a specific
+	no-option-taking argument and process it.'''
+	if opt==sopt or (lopt and opt==lopt):
+		DpkgOptions.Options[keyword]=DpkgOptions.Options[keyword]+1
+		return 1
+
+	return 0
+
+
+# Global initialization
+if not DpkgOptions.Options.has_key("verbose"):
+	DpkgOptions.Options["verbose"]=VERB_QUIET
+
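+# Illustrative usage (a sketch only; assumes DpkgOptions provides the Options
+# dict and regexes, as the functions above already require):
+#
+#     inform("refreshing package lists", VERB_INFORMATIVE)
+#     if not ValidPackageName("my-package"):
+#         abort("invalid package name")
+#     installed = FilterCommand("dpkg -l", r"^ii\s+(\S+)\s+(\S+)")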

=== added file 'minideblib/DpkgVarlist.py'
--- minideblib/DpkgVarlist.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgVarlist.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,51 @@
+# DpkgVarlist.py
+#
+# This module implements DpkgVarlist, a class which contains
+# variables that are used in dpkg source packages.
+#
+# Unlike the other dpkg files this file is not in RFC822 syntax. Instead
+# each variable definition consists of a single line of the form "key=value".
+# Comments are not allowed.
+#
+# Copyright 2001 Wichert Akkerman <wichert@xxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import re, string
+import DpkgDatalist
+
+class DpkgVarlist(DpkgDatalist.DpkgDatalist):
+	def load(self, fn):
+		"Load variable data from a file."
+
+		vf=open(fn, "r")
+		matcher=re.compile("^([^=]+)=\s*(.*)\s*$")
+		lineno=1
+		for line in vf.readlines():
+			mo=matcher.search(line)
+			if not mo:
+				raise DpkgDatalist.DpkgDatalistException("Syntax error in varlistfile", DpkgVarlistException.SYNTAXERROR, fn, lineno)
+
+			self.data[mo.group(1)]=string.strip(mo.group(2))
+			lineno=lineno+1
+
+		vf.close()
+
+	def _store(self, fo):
+		"Write our variable data to a file object"
+
+		for key in self.data.keys():
+			fo.write("%s=%s\n" % (key, self.data[key]))
+
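+# Illustrative usage (a sketch; "debian/substvars" is just an example of a
+# key=value varlist file):
+#
+#     varlist = DpkgVarlist("debian/substvars")   # load() is invoked by __init__
+#     print varlist.data.get("misc:Depends", "")
+#     varlist.store()                             # rewrites the file via SafeWriteFile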

=== added file 'minideblib/DpkgVersion.py'
--- minideblib/DpkgVersion.py	1970-01-01 00:00:00 +0000
+++ minideblib/DpkgVersion.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,191 @@
+# -*- coding: UTF-8 -*-
+# Small changes by Steve Kowalik, GPL (C) 2005
+# Scott James Remnant told me the license is MIT
+"""Parse and compare Debian version strings.
+
+This module contains a class designed to sit in your Python code pretty
+naturally and represent a Debian version string.  It implements various
+special methods to make dealing with them sweet.
+"""
+
+__author__    = "Scott James Remnant <scott@xxxxxxxxxxxx>"
+
+
+import re
+
+
+# Regular expressions make validating things easy
+valid_epoch = re.compile(r'^[0-9]+$')
+valid_upstream = re.compile(r'^[0-9][A-Za-z0-9+:.~-]*$')
+valid_revision = re.compile(r'^[A-Za-z0-9+.~]+$')
+
+# Character comparison table for upstream and revision components
+cmp_table = "~ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz+-.:"
+
+
+class VersionError(Exception): pass
+class BadInputError(VersionError): pass
+class BadEpochError(BadInputError): pass
+class BadUpstreamError(BadInputError): pass
+class BadRevisionError(BadInputError): pass
+
+class DpkgVersion(object):
+    """
+    Debian version number.
+
+    This class is designed to be reasonably transparent and allow you to write code like:
+
+    >>> s.version >= '1.100-1'
+
+    The comparison will be done according to Debian rules, so '1.2' will compare lower than '1.100-1'.
+
+    Properties:
+     - epoch: Epoch
+     - upstream: Upstream version
+     - revision: Debian/local revision
+    """
+
+    def __init__(self, ver):
+        """Parse a string or number into the three components."""
+        self.epoch = None
+        self.upstream = None
+        self.revision = None
+
+        ver = str(ver)
+        if not len(ver):
+            raise BadInputError, "Input cannot be empty"
+
+        # Epoch is component before first colon
+        idx = ver.find(":")
+        if idx != -1:
+            self.epoch = ver[:idx]
+            if not len(self.epoch):
+                raise BadEpochError, "Epoch cannot be empty"
+            if not valid_epoch.search(self.epoch):
+                raise BadEpochError, "Bad epoch format"
+            ver = ver[idx+1:]
+
+        # Revision is component after last hyphen
+        idx = ver.rfind("-")
+        if idx != -1:
+            self.revision = ver[idx+1:]
+            if not len(self.revision):
+                raise BadRevisionError, "Revision cannot be empty"
+            if not valid_revision.search(self.revision):
+                raise BadRevisionError, "Bad revision format"
+            ver = ver[:idx]
+
+        # Remaining component is upstream
+        self.upstream = ver
+        if not len(self.upstream):
+            raise BadUpstreamError, "Upstream version cannot be empty"
+        if not valid_upstream.search(self.upstream):
+            raise BadUpstreamError, "Bad upstream version format"
+
+        if self.epoch is not None:
+            self.epoch = int(self.epoch)
+
+    def getWithoutEpoch(self):
+        """Return the version without the epoch."""
+        str = self.upstream
+        if self.revision is not None:
+            str += "-%s" % (self.revision,)
+        return str
+
+    without_epoch = property(getWithoutEpoch)
+
+    def __str__(self):
+        """Return the class as a string for printing."""
+        str = ""
+        if self.epoch is not None:
+            str += "%d:" % (self.epoch,)
+        str += self.upstream
+        if self.revision is not None:
+            str += "-%s" % (self.revision,)
+        return str
+
+    def __repr__(self):
+        """Return a debugging representation of the object."""
+        return "<%s epoch: %r, upstream: %r, revision: %r>" \
+               % (self.__class__.__name__, self.epoch,
+                  self.upstream, self.revision)
+
+    def __cmp__(self, other):
+        """Compare two Version classes."""
+        other = DpkgVersion(other)
+
+        # Compare epochs only if they are not equal.
+        if self.epoch != other.epoch:
+            # Special cases for braindead packages
+            sepoch = self.epoch
+            oepoch = other.epoch
+            if sepoch is None:
+                sepoch = 0
+            if oepoch is None:
+                oepoch = 0
+            result = cmp(sepoch, oepoch)
+            if result != 0: return result
+
+        result = deb_cmp(self.upstream, other.upstream)
+        if result != 0: return result
+
+        result = deb_cmp(self.revision or "", other.revision or "")
+        if result != 0: return result
+
+        return 0
+
+    def is_native(self):
+        native = False
+        if not self.revision:
+            native = True
+        return native
+
+def strcut(str, idx, accept):
+    """Cut characters from str that are entirely in accept."""
+    ret = ""
+    while idx < len(str) and str[idx] in accept:
+        ret += str[idx]
+        idx += 1
+
+    return (ret, idx)
+
+def deb_order(str, idx):
+    """Return the comparison order of two characters."""
+    if idx >= len(str):
+        return 0
+    elif str[idx] == "~":
+        return -1
+    else:
+        return cmp_table.index(str[idx])
+
+def deb_cmp_str(x, y):
+    """Compare two strings in a deb version."""
+    idx = 0
+    while (idx < len(x)) or (idx < len(y)):
+        result = deb_order(x, idx) - deb_order(y, idx)
+        if result < 0:
+            return -1
+        elif result > 0:
+            return 1
+
+        idx += 1
+
+    return 0
+
+def deb_cmp(x, y):
+    """Implement the string comparison outlined by Debian policy."""
+    x_idx = y_idx = 0
+    while x_idx < len(x) or y_idx < len(y):
+        # Compare strings
+        (x_str, x_idx) = strcut(x, x_idx, cmp_table)
+        (y_str, y_idx) = strcut(y, y_idx, cmp_table)
+        result = deb_cmp_str(x_str, y_str)
+        if result != 0: return result
+
+        # Compare numbers
+        (x_str, x_idx) = strcut(x, x_idx, "0123456789")
+        (y_str, y_idx) = strcut(y, y_idx, "0123456789")
+        result = cmp(int(x_str or "0"), int(y_str or "0"))
+        if result != 0: return result
+
+    return 0
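+
+# Illustrative comparisons (a sketch, not part of the original module):
+#
+#     >>> DpkgVersion("1:1.2-1") > DpkgVersion("1.10-1")
+#     True                                # an explicit epoch beats the implicit 0
+#     >>> DpkgVersion("1.2-1") < "1.10-1"
+#     True                                # __cmp__ coerces the string operand
+#     >>> DpkgVersion("1.0~rc1") < DpkgVersion("1.0")
+#     True                                # '~' sorts before everything else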

=== added file 'minideblib/GPGSigVerifier.py'
--- minideblib/GPGSigVerifier.py	1970-01-01 00:00:00 +0000
+++ minideblib/GPGSigVerifier.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,72 @@
+# GPGSigVerifier -*- mode: python; coding: utf-8 -*-
+# vim:ts=4:sw=4:et:
+
+# A class for verifying signed files
+
+# Copyright (c) 2002 Colin Walters <walters@xxxxxxx>
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+# Copyright (c) 2005,2006 Alexandr D. Kanevskiy <packages@xxxxxxxx>
+#               Rewrite to use python library calls instead of fork/exec
+
+import os, string, commands
+
+class GPGSigVerifierException(Exception):
+    def __init__(self, value):
+        self._value = value
+    def __str__(self):
+        return `self._value`
+
+class GPGSigVerificationFailure(Exception):
+    def __init__(self, value, output):
+        self._value = value
+        self._output = output
+    def __str__(self):
+        return `self._value`
+
+    def getOutput(self):
+        return self._output
+
+class GPGSigVerifier:
+    def __init__(self, keyrings, gpgv=None):
+        self._keyrings = keyrings
+        if gpgv is None:
+            gpgv = '/usr/bin/gpgv'
+        if not os.access(gpgv, os.X_OK):
+            raise GPGSigVerifierException("Couldn't execute \"%s\"" % (gpgv,))
+        self._gpgv = gpgv
+
+    def verify(self, filename, sigfilename=None):
+
+        args = []
+        for keyring in self._keyrings:
+            args.append('--keyring')
+            args.append(keyring)
+        if sigfilename:
+            args.append(sigfilename)
+        args = [self._gpgv] + args + [filename]
+
+        (status, output) = commands.getstatusoutput(string.join(args))
+        if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
+            if os.WIFEXITED(status):
+                msg = "gpgv exited with error code %d" % (os.WEXITSTATUS(status),)
+            elif os.WIFSTOPPED(status):
+                msg = "gpgv stopped unexpectedly with signal %d" % (os.WSTOPSIG(status),)
+            elif os.WIFSIGNALED(status):
+                msg = "gpgv died with signal %d" % (os.WTERMSIG(status),)
+            raise GPGSigVerificationFailure(msg, output)
+        return output.splitlines()
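+
+# Illustrative usage (a sketch; the keyring path is only an example and gpgv
+# must be installed at /usr/bin/gpgv):
+#
+#     verifier = GPGSigVerifier(["/usr/share/keyrings/debian-archive-keyring.gpg"])
+#     try:
+#         gpgv_output = verifier.verify("Release", sigfilename="Release.gpg")
+#     except GPGSigVerificationFailure, failure:
+#         print "Signature verification failed:", failure.getOutput()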
+ 

=== added file 'minideblib/LoggableObject.py'
--- minideblib/LoggableObject.py	1970-01-01 00:00:00 +0000
+++ minideblib/LoggableObject.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,61 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+#
+# LoggableObject.py
+#
+# This module implements a class with an internal _logger object for easy logging.
+#
+# Copyright (C) 2007 Alexandr Kanevskiy
+#
+# Contact: Alexandr Kanevskiy <packages@xxxxxxxx>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+#
+# $Id$
+
+__revision__ = "r"+"$Revision$"[11:-2]
+__all__ = [ 'LoggableObject' ]
+
+import logging
+
+class LazyInit(object):
+    def __init__(self, calculate_function):
+        self._calculate = calculate_function
+
+    def __get__(self, obj, _=None):
+        if obj is None:
+            return self
+        value = self._calculate(obj)
+        setattr(obj, self._calculate.func_name, value)
+        return value
+
+
+class LoggableObject:
+    def _logger(self):
+        """ Returns logger and initializes default handlers if needed """
+        logger = logging.getLogger(self.__module__)
+        c = logger
+        found = False
+        while c:
+            if c.handlers:
+                found = True
+                break
+            c = c.parent
+        if not found:
+            logging.basicConfig()
+        return logger
+    _logger = LazyInit(_logger)
+
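+# Illustrative usage (a sketch, not part of the original module): subclasses
+# get a logger named after their module, created on first access.
+#
+#     class Fetcher(LoggableObject):
+#         def run(self):
+#             self._logger.info("starting fetch")
+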

=== added file 'minideblib/OrderedDict.py'
--- minideblib/OrderedDict.py	1970-01-01 00:00:00 +0000
+++ minideblib/OrderedDict.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,76 @@
+# OrderedDict.py
+#
+# This class functions almost exactly like UserDict.  However, when using
+# the sequence methods, it returns items in the same order in which they
+# were added, instead of some random order.
+#
+# Copyright 2001 Adam Heath <doogie@xxxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from UserDict import UserDict
+
+class OrderedDict(UserDict):
+    __order=[]
+
+    def __init__(self, dict=None):
+        UserDict.__init__(self)
+        self.__order=[]
+        if dict is not None and dict.__class__ is not None:
+            self.update(dict)
+
+    def __cmp__(self, dict):
+        if isinstance(dict, OrderedDict):
+            ret=cmp(self.__order, dict.__order)
+            if not ret:
+                ret=UserDict.__cmp__(self, dict)
+            return ret
+        else:
+            return UserDict.__cmp__(self, dict)
+
+    def __setitem__(self, key, value):
+        if not self.has_key(key):
+            self.__order.append(key)
+        UserDict.__setitem__(self, key, value)
+
+    def __delitem__(self, key):
+        if self.has_key(key):
+            del self.__order[self.__order.index(key)]
+        UserDict.__delitem__(self, key)
+
+    def clear(self):
+        self.__order=[]
+        UserDict.clear(self)
+
+    def copy(self):
+        if self.__class__ is OrderedDict:
+            return OrderedDict(self)
+        import copy
+        return copy.copy(self)
+
+    def keys(self):
+        return self.__order
+
+    def items(self):
+        return map(lambda x, self=self: (x, self.__getitem__(x)), self.__order)
+
+    def values(self):
+        return map(lambda x, self=self: self.__getitem__(x), self.__order)
+
+    def update(self, dict):
+        for k, v in dict.items():
+            self.__setitem__(k, v)
+
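+# Illustrative behaviour (a sketch, not from the original file): keys() returns
+# entries in insertion order, unlike a plain Python 2 dict.
+#
+#     d = OrderedDict()
+#     d["zebra"] = 1
+#     d["apple"] = 2
+#     d.keys()        # -> ['zebra', 'apple']
+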
+# vim:ts=4:sw=4:et:

=== added file 'minideblib/SafeWriteFile.py'
--- minideblib/SafeWriteFile.py	1970-01-01 00:00:00 +0000
+++ minideblib/SafeWriteFile.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,81 @@
+# SafeWriteFile.py
+#
+# This module implements a writable file object.  It writes to a specified
+# newname and, when closed, renames the file to the realname.  If the object
+# is deleted without being closed, the rename isn't done.  If abort() is
+# called, it also disables the rename.
+#
+# Copyright 2001 Adam Heath <doogie@xxxxxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+from types import StringType
+from shutil import copy2
+from string import find
+from os import rename
+
+class ObjectNotAllowed(Exception):
+    pass
+
+
+class InvalidMode(Exception):
+    pass
+
+
+class SafeWriteFile:
+    def __init__(self, newname, realname, mode="w", bufsize=-1):
+
+        if type(newname)!=StringType:
+            raise ObjectNotAllowed(newname)
+        if type(realname)!=StringType:
+            raise ObjectNotAllowed(realname)
+
+        if find(mode, "r")>=0:
+            raise InvalidMode(mode)
+        if find(mode, "a")>=0 or find(mode, "+") >= 0:
+            copy2(realname, newname)
+        self.fobj=open(newname, mode, bufsize)
+        self.newname=newname
+        self.realname=realname
+        self.__abort=0
+
+    def close(self):
+        self.fobj.close()
+        if not (self.closed and self.__abort):
+            rename(self.newname, self.realname)
+
+    def abort(self):
+        self.__abort=1
+
+    def __del__(self):
+        self.abort()
+        del self.fobj
+
+    def __getattr__(self, attr):
+        try:
+            return self.__dict__[attr]
+        except KeyError:
+            return getattr(self.fobj, attr)
+
+
+if __name__ == "__main__":
+    import time
+    f=SafeWriteFile("sf.new", "sf.data")
+    f.write("test\n")
+    f.flush()
+    time.sleep(1)
+    f.close()
+
+# vim:ts=4:sw=4:et:

=== added file 'minideblib/SignedFile.py'
--- minideblib/SignedFile.py	1970-01-01 00:00:00 +0000
+++ minideblib/SignedFile.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,107 @@
+# SignedFile -*- mode: python; coding: utf-8 -*-
+
+# SignedFile offers a subset of file object operations, and is
+# designed to transparently handle files with PGP signatures.
+
+# Copyright © 2002 Colin Walters <walters@xxxxxxx>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+import re,string
+
+class SignedFile:
+    _stream = None
+    _eof = 0
+    _signed = 0
+    _signature = None
+    _signatureversion = None
+    _initline = None
+    def __init__(self, stream):
+        self._stream = stream
+        line = stream.readline()
+        if (line == "-----BEGIN PGP SIGNED MESSAGE-----\n"):
+            self._signed = 1
+            while (1):
+                line = stream.readline()
+                if (len(line) == 0 or line == '\n'):
+                    break
+        else:
+            self._initline = line
+
+    def readline(self):
+        if self._eof:
+            return ''
+        if self._initline:
+            line = self._initline
+            self._initline = None
+        else:
+            line = self._stream.readline()
+        if not self._signed:
+            return line
+        elif line == "-----BEGIN PGP SIGNATURE-----\n":
+            self._eof = 1
+            self._signature = []
+            self._signatureversion = self._stream.readline()
+            self._stream.readline()  # skip blank line
+            while 1:
+                line = self._stream.readline()
+                if len(line) == 0 or line == "-----END PGP SIGNATURE-----\n":
+                    break
+                self._signature.append(line)
+            self._signature = string.join(self._signature, "")
+            return ''
+        return line
+            
+    def readlines(self):
+        ret = []
+        while 1:
+            line = self.readline()
+            if (line != ''):
+                ret.append(line)
+            else:
+                break
+        return ret
+
+    def close(self):
+        self._stream.close()
+
+    def getSigned(self):
+        return self._signed
+
+    def getSignature(self):
+        return self._signature
+
+    def getSignatureVersion(self):
+        return self._signatureversion
+            
+if __name__=="__main__":
+    import sys
+    if len(sys.argv) < 2:
+        print "Need one file as an argument"
+        sys.exit(1)
+    filename = sys.argv[1]
+    f=SignedFile(open(filename))
+    if f.getSigned():
+        print "**** SIGNED ****"
+    else:
+        print "**** NOT SIGNED ****"
+    lines=f.readlines()
+    print lines
+    if not f.getSigned():
+        actuallines = open(filename).readlines()
+        assert(len(lines) == len(actuallines))
+    else:
+        print "Signature: %s" % (f.getSignature())
+
+# vim:ts=4:sw=4:et:

=== added file 'minideblib/__init__.py'
--- minideblib/__init__.py	1970-01-01 00:00:00 +0000
+++ minideblib/__init__.py	2009-08-23 18:39:29 +0000
@@ -0,0 +1,5 @@
+#!/usr/bin/python -tt
+# -*- coding: UTF-8 -*-
+# vim: sw=4 ts=4 expandtab ai
+
+__all__ = [ 'ChangeFile', 'DpkgVersion', 'DpkgControl', 'AptRepoClient', 'DpkgDebPackage', 'DpkgChangelog', 'LoggableObject' ]