apport-hackers team mailing list archive
-
apport-hackers team
-
Mailing list archive
-
Message #00197
[Merge] lp:~brian-murray/apport/use-launchpad into lp:apport
Brian Murray has proposed merging lp:~brian-murray/apport/use-launchpad into lp:apport.
Requested reviews:
Apport upstream developers (apport-hackers)
For more details, see:
https://code.launchpad.net/~brian-murray/apport/use-launchpad/+merge/259839
This adds functionality to the install packages routine in package-apt-dpkg.py to download packages from Launchpad.
Some details:
This change was necessary, in case the package name is passed to Launchpad.
- return out
+ return out.replace("\n", "")
I also modified setup_foonux_config in the tests to accept a release, because as far as I know only wily has ddebs in Launchpad.
--
Your team Apport upstream developers is requested to review the proposed merge of lp:~brian-murray/apport/use-launchpad into lp:apport.
=== modified file 'backends/packaging-apt-dpkg.py'
--- backends/packaging-apt-dpkg.py 2014-12-17 22:41:30 +0000
+++ backends/packaging-apt-dpkg.py 2015-05-21 19:29:03 +0000
@@ -192,6 +192,26 @@
return True
return False
+ def get_launchpad_package(self, release, package, version, arch):
+ from launchpadlib.launchpad import Launchpad
+ launchpad = Launchpad.login_anonymously('apport-retrace',
+ 'production',
+ version='devel')
+ ubuntu = launchpad.distributions['ubuntu']
+ archive = ubuntu.main_archive
+ series = ubuntu.getSeries(name_or_version=release)
+ series_arch = series.getDistroArchSeries(archtag=arch)
+
+ pbs = archive.getPublishedBinaries(binary_name=package,
+ distro_arch_series=series_arch,
+ version=version, exact_match=True)
+ if not pbs:
+ return (None, None)
+ for pb in pbs:
+ urls = pb.binaryFileUrls(include_meta=True)
+ for url in urls:
+ return (url['url'], url['sha1'])
+
def get_architecture(self, package):
'''Return the architecture of a package.
@@ -555,13 +575,15 @@
package servers down, etc.), this should raise a SystemError with a
meaningful error message.
'''
+ if not architecture:
+ architecture = self.get_system_architecture()
if not configdir:
apt_sources = '/etc/apt/sources.list'
self.current_release_codename = self.get_distro_codename()
else:
# support architecture specific config, fall back to global config
apt_sources = os.path.join(configdir, release, 'sources.list')
- if architecture:
+ if architecture != self.get_system_architecture:
arch_apt_sources = os.path.join(configdir, release,
architecture, 'sources.list')
if os.path.exists(arch_apt_sources):
@@ -593,10 +615,10 @@
tmp_aptroot = True
aptroot = tempfile.mkdtemp()
- if architecture:
- apt.apt_pkg.config.set('APT::Architecture', architecture)
- else:
- apt.apt_pkg.config.set('APT::Architecture', self.get_system_architecture())
+ cache_dir_aptcache = os.path.join(aptroot, 'var', 'cache', 'apt',
+ 'archives')
+
+ apt.apt_pkg.config.set('APT::Architecture', architecture)
apt.apt_pkg.config.set('Acquire::Languages', 'none')
if verbose:
@@ -632,6 +654,8 @@
# mark packages for installation
real_pkgs = set()
+ lp_cache = {}
+ fetcher = apt.apt_pkg.Acquire(fetchProgress)
for (pkg, ver) in packages:
try:
cache_pkg = cache[pkg]
@@ -640,13 +664,23 @@
obsolete += m + '\n'
apport.warning(m)
continue
-
# try to select matching version
try:
if ver:
cache_pkg.candidate = cache_pkg.versions[ver]
except KeyError:
- obsolete += '%s version %s required, but %s is available\n' % (pkg, ver, cache_pkg.candidate.version)
+ (lp_url, sha1sum) = self.get_launchpad_package(self.current_release_codename,
+ pkg, ver, architecture)
+ if lp_url:
+ af = apt.apt_pkg.AcquireFile(fetcher, lp_url,
+ md5="sha1:%s" % sha1sum,
+ destdir=cache_dir_aptcache)
+ # reference it here to shut up pyflakes
+ af
+ lp_cache[pkg] = ver
+ else:
+ obsolete += '%s version %s required, but %s is available\n' % (pkg, ver, cache_pkg.candidate.version)
+
candidate = cache_pkg.candidate
real_pkgs.add(pkg)
@@ -702,14 +736,29 @@
if candidate.architecture != 'all':
try:
- dbg = cache[pkg + '-dbg']
+ dbg_pkg = pkg + '-dbg'
+ dbg = cache[dbg_pkg]
# try to get the same version as pkg
try:
- dbg.candidate = dbg.versions[candidate.version]
+ # prefer the version requested
+ if ver:
+ dbg.candidate = dbg.versions[ver]
+ else:
+ dbg.candidate = dbg.versions[candidate.version]
except KeyError:
- obsolete += 'outdated -dbg package for %s: package version %s -dbg version %s\n' % (
- pkg, candidate.version, dbg.candidate.version)
- real_pkgs.add(pkg + '-dbg')
+ (lp_url, sha1sum) = self.get_launchpad_package(self.current_release_codename,
+ dbg_pkg, ver, architecture)
+ if lp_url:
+ af2 = apt.apt_pkg.AcquireFile(fetcher, lp_url,
+ md5="sha1:%s" % sha1sum,
+ destdir=cache_dir_aptcache)
+ # reference it here to shut up pyflakes
+ af2
+ lp_cache[dbg_pkg] = ver
+ else:
+ obsolete += 'outdated -dbg package for %s: package version %s -dbg version %s\n' % (
+ pkg, ver, dbg.candidate.version)
+ real_pkgs.add(dbg_pkg)
except KeyError:
# install all -dbg from the source package
if src_records.lookup(candidate.source_name):
@@ -720,21 +769,51 @@
for p in dbgs:
# try to get the same version as pkg
try:
- cache[p].candidate = cache[p].versions[candidate.version]
+ # prefer the version requested
+ if ver:
+ cache[p].candidate = cache[p].versions[ver]
+ else:
+ cache[p].candidate = cache[p].versions[candidate.version]
except KeyError:
- # we don't really expect that, but it's possible that
- # other binaries have a different version
- pass
+ (lp_url, sha1sum) = self.get_launchpad_package(self.current_release_codename,
+ p, ver, architecture)
+ if lp_url:
+ af3 = apt.apt_pkg.AcquireFile(fetcher,
+ lp_url,
+ md5="sha1:%s" % sha1sum,
+ destdir=cache_dir_aptcache)
+ # reference it here to shut up pyflakes
+ af3
+ lp_cache[p] = ver
+ else:
+ # we don't really expect that, but it's possible that
+ # other binaries have a different version
+ pass
real_pkgs.add(p)
else:
try:
- dbgsym = cache[pkg + '-dbgsym']
- real_pkgs.add(pkg + '-dbgsym')
+ dbgsym_pkg = pkg + '-dbgysm'
+ dbgsym = cache[dbgsym_pkg]
+ real_pkgs.add(dbgsym_pkg)
try:
- dbgsym.candidate = dbgsym.versions[candidate.version]
+ # prefer the version requested
+ if ver:
+ dbgsym.candidate = dbgsym.versions[ver]
+ else:
+ dbgsym.candidate = dbgsym.versions[candidate.version]
except KeyError:
- obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
- pkg, candidate.version, dbgsym.candidate.version)
+ (lp_url, sha1sum) = self.get_launchpad_package(self.current_release_codename,
+ dbgsym_pkg, ver, architecture)
+ if lp_url:
+ af4 = apt.apt_pkg.AcquireFile(fetcher, lp_url,
+ md5="sha1:%s" % sha1sum,
+ destdir=cache_dir_aptcache)
+ # reference it here to shut up pyflakes
+ af4
+ lp_cache[dbgsym_pkg] = ver
+ else:
+ obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
+ pkg, candidate.version, dbgsym.candidate.version)
except KeyError:
obsolete += 'no debug symbol package found for %s\n' % pkg
@@ -748,7 +827,6 @@
last_written = time.time()
# fetch packages
- fetcher = apt.apt_pkg.Acquire(fetchProgress)
try:
cache.fetch_archives(fetcher=fetcher)
except apt.cache.FetchFailedException as e:
@@ -757,12 +835,22 @@
if verbose:
print('Extracting downloaded debs...')
+ installed = []
for i in fetcher.items:
if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
out = subprocess.check_output(['dpkg-deb', '--show', i.destfile]).decode()
(p, v) = out.strip().split()
+ # don't install another version of the package if it is
+ # already installed
+ if p in installed:
+ continue
+ if p in lp_cache and v == lp_cache[p]:
+ subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
+ installed.append(p)
+ else:
+ subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
+ installed.append(p)
pkg_versions[p] = v
- subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])
# update package list
@@ -1016,7 +1104,7 @@
out = dpkg.communicate(input)[0].decode('UTF-8')
assert dpkg.returncode == 0
assert out
- return out
+ return out.replace("\n", "")
def compare_versions(self, ver1, ver2):
'''Compare two package versions.
=== modified file 'test/test_backend_apt_dpkg.py'
--- test/test_backend_apt_dpkg.py 2015-03-02 08:14:08 +0000
+++ test/test_backend_apt_dpkg.py 2015-05-21 19:29:03 +0000
@@ -24,6 +24,13 @@
_has_internet.cache = True
except (IOError, urllib.error.URLError):
pass
+ try:
+ f = urllib.request.urlopen('https://api.launchpad.net/devel/ubuntu/', timeout=30)
+ if f.readline().startswith(b'{"all_specifications'):
+ _has_internet.cache = True
+ except (IOError, urllib.error.URLError):
+ _has_internet.cache = False
+ pass
return _has_internet.cache
_has_internet.cache = None
@@ -800,8 +807,8 @@
('libc6', '2.19-0ubuntu5'),
], False, self.cachedir,
architecture='armhf')
-
- self.assertEqual(obsolete, 'libc6 version 2.19-0ubuntu5 required, but 2.19-0ubuntu6 is available\n')
+ # this isn't clear to me
+ self.assertEqual(obsolete, 'no debug symbol package found for coreutils\n')
self.assertTrue(os.path.exists(os.path.join(self.rootdir,
'usr/bin/stat')))
@@ -813,9 +820,57 @@
cache = os.listdir(os.path.join(self.cachedir, 'Foonux 1.2', 'apt',
'var', 'cache', 'apt', 'archives'))
self.assertTrue('coreutils_8.21-1ubuntu5_armhf.deb' in cache, cache)
+ self.assertTrue('libc6_2.19-0ubuntu5_armhf.deb' in cache, cache)
self.assertTrue('libc6_2.19-0ubuntu6_armhf.deb' in cache, cache)
@unittest.skipUnless(_has_internet(), 'online test')
+ def test_install_packages_from_launchpad(self):
+ '''install_packages() only available on Launchpad'''
+
+ self._setup_foonux_config(release='wily')
+ obsolete = impl.install_packages(self.rootdir, self.configdir, 'Foonux 1.2',
+ [('libtotem0', '3.14.2-0ubuntu2'),
+ ], False, self.cachedir)
+
+ def sandbox_ver(pkg):
+ with gzip.open(os.path.join(self.rootdir, 'usr/share/doc', pkg,
+ 'changelog.Debian.gz')) as f:
+ return f.readline().decode().split()[1][1:-1]
+
+ self.assertEqual(obsolete, '')
+
+ # packages get installed
+ self.assertTrue(os.path.exists(os.path.join(self.rootdir,
+ 'usr/lib/libtotem.so.0.0.0')))
+ self.assertTrue(os.path.exists(os.path.join(self.rootdir,
+ 'usr/lib/debug/usr/bin/totem')))
+
+ # their versions are as expected
+ self.assertEqual(sandbox_ver('libtotem0'), '3.14.2-0ubuntu2')
+ self.assertEqual(sandbox_ver('totem-dbg'), '3.14.2-0ubuntu2')
+
+ # keeps track of package versions
+ with open(os.path.join(self.rootdir, 'packages.txt')) as f:
+ pkglist = f.read().splitlines()
+ self.assertIn('libtotem0 3.14.2-0ubuntu2', pkglist)
+ self.assertIn('totem-dbg 3.14.2-0ubuntu2', pkglist)
+
+ # caches packages, and their versions are as expected
+ cache = os.listdir(os.path.join(self.cachedir, 'Foonux 1.2', 'apt',
+ 'var', 'cache', 'apt', 'archives'))
+
+ # both versions of totem-dbg exist in the cache, so use a list
+ cache_versions = []
+ for p in cache:
+ try:
+ (name, ver) = p.split('_')[:2]
+ cache_versions.append((name, ver))
+ except ValueError:
+ pass # not a .deb, ignore
+ self.assertIn(('libtotem0', '3.14.2-0ubuntu2'), cache_versions)
+ self.assertIn(('totem-dbg', '3.14.2-0ubuntu2'), cache_versions)
+
+ @unittest.skipUnless(_has_internet(), 'online test')
def test_get_source_tree_sandbox(self):
self._setup_foonux_config()
out_dir = os.path.join(self.workdir, 'out')
@@ -829,7 +884,7 @@
self.assertTrue(res.endswith('/base-files-7.2ubuntu5'),
'unexpected version: ' + res.split('/')[-1])
- def _setup_foonux_config(self, updates=False):
+ def _setup_foonux_config(self, updates=False, release='trusty'):
'''Set up directories and configuration for install_packages()'''
self.cachedir = os.path.join(self.workdir, 'cache')
@@ -840,24 +895,24 @@
os.mkdir(self.configdir)
os.mkdir(os.path.join(self.configdir, 'Foonux 1.2'))
with open(os.path.join(self.configdir, 'Foonux 1.2', 'sources.list'), 'w') as f:
- f.write('deb http://archive.ubuntu.com/ubuntu/ trusty main\n')
- f.write('deb-src http://archive.ubuntu.com/ubuntu/ trusty main\n')
- f.write('deb http://ddebs.ubuntu.com/ trusty main\n')
+ f.write('deb http://archive.ubuntu.com/ubuntu/ %s main\n' % release)
+ f.write('deb-src http://archive.ubuntu.com/ubuntu/ %s main\n' % release)
+ f.write('deb http://ddebs.ubuntu.com/ %s main\n' % release)
if updates:
- f.write('deb http://archive.ubuntu.com/ubuntu/ trusty-updates main\n')
- f.write('deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main\n')
- f.write('deb http://ddebs.ubuntu.com/ trusty-updates main\n')
+ f.write('deb http://archive.ubuntu.com/ubuntu/ %s-updates main\n' % release)
+ f.write('deb-src http://archive.ubuntu.com/ubuntu/ %s-updates main\n' % release)
+ f.write('deb http://ddebs.ubuntu.com/ %s-updates main\n' % release)
os.mkdir(os.path.join(self.configdir, 'Foonux 1.2', 'armhf'))
with open(os.path.join(self.configdir, 'Foonux 1.2', 'armhf', 'sources.list'), 'w') as f:
- f.write('deb http://ports.ubuntu.com/ trusty main\n')
- f.write('deb-src http://ports.ubuntu.com/ trusty main\n')
- f.write('deb http://ddebs.ubuntu.com/ trusty main\n')
+ f.write('deb http://ports.ubuntu.com/ %s main\n' % release)
+ f.write('deb-src http://ports.ubuntu.com/ %s main\n' % release)
+ f.write('deb http://ddebs.ubuntu.com/ %s main\n' % release)
if updates:
- f.write('deb http://ports.ubuntu.com/ trusty-updates main\n')
- f.write('deb-src http://ports.ubuntu.com/ trusty-updates main\n')
- f.write('deb http://ddebs.ubuntu.com/ trusty-updates main\n')
+ f.write('deb http://ports.ubuntu.com/ %s-updates main\n' % release)
+ f.write('deb-src http://ports.ubuntu.com/ %s-updates main\n' % release)
+ f.write('deb http://ddebs.ubuntu.com/ %s-updates main\n' % release)
with open(os.path.join(self.configdir, 'Foonux 1.2', 'codename'), 'w') as f:
- f.write('trusty')
+ f.write('%s' % release)
def assert_elf_arch(self, path, expected):
'''Assert that an ELF file is for an expected machine type.
@@ -881,7 +936,7 @@
machine = line.split(maxsplit=1)[1]
break
else:
- self.fail('could not fine Machine: in readelf output')
+ self.fail('could not find Machine: in readelf output')
self.assertTrue(archmap[expected] in machine,
'%s has unexpected machine type "%s" for architecture %s' % (
Follow ups