Re: [Merge] lp:~brian-murray/apport/sandbox-gdb into lp:apport

 

Just a few comments, but nothing that must be changed.

Diff comments:

> 
> === modified file 'apport/report.py'
> --- apport/report.py	2016-12-18 12:50:20 +0000
> +++ apport/report.py	2017-01-27 21:11:47 +0000
> @@ -213,6 +213,21 @@
>  
>      return False
>  
> +
> +def _which_extrapath(command, extra_path):
> +    '''Return path of command, preferring extra_path'''
> +
> +    if extra_path:
> +        env = os.environ.copy()
> +        env['PATH'] = extra_path + ':' + env.get('PATH', '')

It may not actually matter, but this will leave a trailing colon in $PATH when the original $PATH is empty or unset.  Just FYI, a more robust and platform-agnostic way of doing this would be:

parts = env.get('PATH', '').split(os.pathsep)
# The following also works if extra_path is itself colon separated...
parts[0:0] = extra_path.split(os.pathsep)
env['PATH'] = os.pathsep.join(parts)
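
Putting that together, the helper could look roughly like this (just a sketch, not tested; os and subprocess are already imported in report.py):

def _which_extrapath(command, extra_path):
    '''Return path of command, preferring extra_path'''

    env = None
    if extra_path:
        env = os.environ.copy()
        # prepend extra_path (which may itself be colon separated) without
        # leaving a trailing separator when $PATH is empty or unset
        parts = [p for p in env.get('PATH', '').split(os.pathsep) if p]
        env['PATH'] = os.pathsep.join(extra_path.split(os.pathsep) + parts)
    try:
        return subprocess.check_output(
            ['which', command], env=env).decode().strip()
    except subprocess.CalledProcessError:
        return None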

> +    else:
> +        env = None
> +    try:
> +        return subprocess.check_output(['which', command], env=env).decode().strip()
> +    except subprocess.CalledProcessError:
> +        return None

Should this error be logged somehow?  I've always found it to be pretty difficult to debug subprocess failures when the exception is just swallowed.
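
Something along these lines would already help (sketch only; I'm assuming apport.warning, which the packaging backend below already uses, is reasonable to call from here):

    try:
        return subprocess.check_output(['which', command], env=env).decode().strip()
    except subprocess.CalledProcessError as error:
        # make the failure visible instead of silently returning None
        apport.warning('which %s failed with exit code %i'
                       % (command, error.returncode))
        return None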

> +
> +
>  #
>  # Report class
>  #
> @@ -705,8 +719,7 @@
>              value_keys.append(name)
>              gdb_cmd += ['--ex', 'p -99', '--ex', cmd]
>  
> -        # call gdb (might raise OSError)
> -        out = _command_output(gdb_cmd).decode('UTF-8', errors='replace')
> +        out = _command_output(gdb_cmd, env=environ).decode('UTF-8', errors='replace')

Not that you've changed the old code, but one thing to keep in mind with subprocess is passing in universal_newlines=True, which decodes stdout using the locale's encoding (so UTF-8 when the locale is set correctly) and always returns str rather than bytes.  But this might be a case where the locale could be broken.
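
I.e. roughly (using check_output directly for illustration rather than the _command_output helper):

# relies on the locale being configured sanely
out = subprocess.check_output(gdb_cmd, env=environ, universal_newlines=True)

# versus decoding explicitly, which doesn't depend on the locale at all
out = subprocess.check_output(gdb_cmd, env=environ).decode('UTF-8', errors='replace')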

>  
>          # check for truncated stack trace
>          if 'is truncated: expected core file size' in out:
> @@ -1501,20 +1514,28 @@
>          When available, this calls "gdb-multiarch" instead of "gdb", for
>          processing crash reports from foreign architectures.
>  
> -        Return argv list.
> +        Return argv list for gdb and any environment variables.
>          '''
>          assert 'ExecutablePath' in self
>          executable = self.get('InterpreterPath', self['ExecutablePath'])
>  
> -        command = ['gdb']
> -
> -        if 'Architecture' in self and self['Architecture'] != packaging.get_system_architecture():
> +        same_arch = False
> +        if 'Architecture' in self and self['Architecture'] == packaging.get_system_architecture():
> +            same_arch = True
> +
> +        gdb_sandbox_bin = gdb_sandbox and os.path.join(gdb_sandbox, 'usr', 'bin') or None

This would probably read better, although slightly less terse, with a ternary operator:

gdb_sandbox_bin = (os.path.join(gdb_sandbox, 'usr', 'bin') if gdb_sandbox else None)

> +        gdb_path = _which_extrapath('gdb', gdb_sandbox_bin)
> +        if not gdb_path:
> +            apport.fatal('gdb does not exist in the %ssandbox nor on the host'
> +                         % ('gdb ' if not same_arch else ''))
> +        command = [gdb_path]
> +        environ = None
> +
> +        if not same_arch:
>              # check if we have gdb-multiarch
> -            which = subprocess.Popen(['which', 'gdb-multiarch'],
> -                                     stdout=subprocess.PIPE)
> -            which.communicate()
> -            if which.returncode == 0:
> -                command = ['gdb-multiarch']
> +            ma = _which_extrapath('gdb-multiarch', gdb_sandbox_bin)
> +            if ma:
> +                command = [ma]
>              else:
>                  sys.stderr.write(
>                      'WARNING: Please install gdb-multiarch for processing '
> 
> === modified file 'backends/packaging-apt-dpkg.py'
> --- backends/packaging-apt-dpkg.py	2016-12-10 11:28:27 +0000
> +++ backends/packaging-apt-dpkg.py	2017-01-27 21:11:47 +0000
> @@ -798,6 +808,38 @@
>          fetcher = apt.apt_pkg.Acquire(fetchProgress)
>          # need to keep AcquireFile references
>          acquire_queue = []
> +        # add any dependencies to the packages list
> +        if install_deps:
> +            deps = []
> +            for (pkg, ver) in packages:

The parens are unnecessary, but don't hurt.

> +                try:
> +                    cache_pkg = cache[pkg]
> +                except KeyError:
> +                    m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
> +                    obsolete += m + '\n'
> +                    apport.warning(m)
> +                    continue
> +                for dep in cache_pkg.candidate.dependencies:
> +                    # if the dependency is in the list of packages we don't
> +                    # need to look up its dependencies again
> +                    if dep[0].name in [pkg[0] for pkg in packages]:
> +                        continue
> +                    # if the package is already extracted in the sandbox
> +                    # because the report need that package we don't want to
> +                    # install a newer version which may cause a CRC mismatch
> +                    # with the installed dbg symbols
> +                    if dep[0].name in pkg_versions:
> +                        inst_version = pkg_versions[dep[0].name]
> +                        if self.compare_versions(inst_version, dep[0].version) > -1:
> +                            deps.append((dep[0].name, inst_version))
> +                        else:
> +                            deps.append((dep[0].name, dep[0].version))
> +                    else:
> +                        deps.append((dep[0].name, dep[0].version))
> +                    if dep[0].name not in [pkg[0] for pkg in packages]:
> +                        packages.append((dep[0].name, None))
> +            packages.extend(deps)
> +
>          for (pkg, ver) in packages:
>              try:
>                  cache_pkg = cache[pkg]
> 
> === modified file 'test/test_backend_apt_dpkg.py'
> --- test/test_backend_apt_dpkg.py	2016-12-10 11:28:27 +0000
> +++ test/test_backend_apt_dpkg.py	2017-01-27 21:11:47 +0000
> @@ -644,6 +644,23 @@
>          self.assertEqual(len(pkglist), 3, str(pkglist))
>  
>      @unittest.skipUnless(_has_internet(), 'online test')
> +    def test_install_packages_dependencies(self):
> +        '''install packages's dependencies'''
> +
> +        self._setup_foonux_config(release='xenial')
> +        # coreutils should always depend on libc6
> +        impl.install_packages(self.rootdir, self.configdir,
> +                              'Foonux 1.2',
> +                              [('coreutils', None)],
> +                              False, None, install_deps=True)
> +
> +        # check packages.txt for a dependency
> +        with open(os.path.join(self.rootdir, 'packages.txt')) as f:

I would pass in `encoding='UTF-8'` to the open() call.
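
I.e.:

        with open(os.path.join(self.rootdir, 'packages.txt'), encoding='UTF-8') as f:
            pkglist = f.read().splitlines()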

> +            pkglist = f.read().splitlines()
> +        self.assertIn('coreutils 8.25-2ubuntu2', pkglist)
> +        self.assertIn('libc6 2.23-0ubuntu3', pkglist)
> +
> +    @unittest.skipUnless(_has_internet(), 'online test')
>      def test_install_packages_system(self):
>          '''install_packages() with system configuration'''
>  


-- 
https://code.launchpad.net/~brian-murray/apport/sandbox-gdb/+merge/315619
Your team Apport upstream developers is requested to review the proposed merge of lp:~brian-murray/apport/sandbox-gdb into lp:apport.

