Back to index

apport  2.3
packaging-apt-dpkg.py
Go to the documentation of this file.
00001 '''apport.PackageInfo class implementation for python-apt and dpkg.
00002 
00003 This is used on Debian and derivatives such as Ubuntu.
00004 '''
00005 
00006 # Copyright (C) 2007 - 2011 Canonical Ltd.
00007 # Author: Martin Pitt <martin.pitt@ubuntu.com>
00008 #
00009 # This program is free software; you can redistribute it and/or modify it
00010 # under the terms of the GNU General Public License as published by the
00011 # Free Software Foundation; either version 2 of the License, or (at your
00012 # option) any later version.  See http://www.gnu.org/copyleft/gpl.html for
00013 # the full text of the license.
00014 
import fnmatch
import glob
import hashlib
import os
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import time
00017 
00018 import warnings
00019 warnings.filterwarnings('ignore', 'apt API not stable yet', FutureWarning)
00020 import apt
00021 try:
00022     import cPickle as pickle
00023     from urllib import urlopen
00024     (pickle, urlopen)  # pyflakes
00025 except ImportError:
00026     # python 3
00027     from urllib.request import urlopen
00028     import pickle
00029 
00030 import apport
00031 from apport.packaging import PackageInfo
00032 
00033 
00034 class __AptDpkgPackageInfo(PackageInfo):
00035     '''Concrete apport.PackageInfo class implementation for python-apt and
00036     dpkg, as found on Debian and derivatives such as Ubuntu.'''
00037 
00038     def __init__(self):
00039         self._apt_cache = None
00040         self._sandbox_apt_cache = None
00041         self._contents_dir = None
00042         self._mirror = None
00043         self._virtual_mapping_obj = None
00044 
00045         self.configuration = '/etc/default/apport'
00046 
00047     def __del__(self):
00048         try:
00049             if self._contents_dir:
00050                 shutil.rmtree(self._contents_dir)
00051         except AttributeError:
00052             pass
00053 
00054     def _virtual_mapping(self, configdir):
00055         if self._virtual_mapping_obj is not None:
00056             return self._virtual_mapping_obj
00057 
00058         mapping_file = os.path.join(configdir, 'virtual_mapping.pickle')
00059         if os.path.exists(mapping_file):
00060             with open(mapping_file, 'rb') as fp:
00061                 self._virtual_mapping_obj = pickle.load(fp)
00062         else:
00063             self._virtual_mapping_obj = {}
00064 
00065         return self._virtual_mapping_obj
00066 
00067     def _save_virtual_mapping(self, configdir):
00068         mapping_file = os.path.join(configdir, 'virtual_mapping.pickle')
00069         if self._virtual_mapping_obj is not None:
00070             with open(mapping_file, 'wb') as fp:
00071                 pickle.dump(self._virtual_mapping_obj, fp)
00072 
00073     def _cache(self):
00074         '''Return apt.Cache() (initialized lazily).'''
00075 
00076         self._sandbox_apt_cache = None
00077         if not self._apt_cache:
00078             try:
00079                 # avoid spewage on stdout
00080                 progress = apt.progress.base.OpProgress()
00081                 self._apt_cache = apt.Cache(progress, rootdir='/')
00082             except AttributeError:
00083                 # older python-apt versions do not yet have above argument
00084                 self._apt_cache = apt.Cache(rootdir='/')
00085         return self._apt_cache
00086 
00087     def _sandbox_cache(self, aptroot, apt_sources, fetchProgress):
00088         '''Build apt sandbox and return apt.Cache(rootdir=) (initialized lazily).
00089 
00090         Clear the package selection on subsequent calls.
00091         '''
00092         self._apt_cache = None
00093         if not self._sandbox_apt_cache:
00094             self._build_apt_sandbox(aptroot, apt_sources)
00095             rootdir = os.path.abspath(aptroot)
00096             self._sandbox_apt_cache = apt.Cache(rootdir=rootdir)
00097             try:
00098                 # We don't need to update this multiple times.
00099                 self._sandbox_apt_cache.update(fetchProgress)
00100             except apt.cache.FetchFailedException as e:
00101                 raise SystemError(str(e))
00102             self._sandbox_apt_cache.open()
00103         else:
00104             self._sandbox_apt_cache.clear()
00105         return self._sandbox_apt_cache
00106 
00107     def _apt_pkg(self, package):
00108         '''Return apt.Cache()[package] (initialized lazily).
00109 
00110         Throw a ValueError if the package does not exist.
00111         '''
00112         try:
00113             return self._cache()[package]
00114         except KeyError:
00115             raise ValueError('package does not exist')
00116 
00117     def get_version(self, package):
00118         '''Return the installed version of a package.'''
00119 
00120         pkg = self._apt_pkg(package)
00121         inst = pkg.installed
00122         if not inst:
00123             raise ValueError('package does not exist')
00124         return inst.version
00125 
00126     def get_available_version(self, package):
00127         '''Return the latest available version of a package.'''
00128 
00129         return self._apt_pkg(package).candidate.version
00130 
00131     def get_dependencies(self, package):
00132         '''Return a list of packages a package depends on.'''
00133 
00134         cur_ver = self._apt_pkg(package)._pkg.current_ver
00135         if not cur_ver:
00136             # happens with virtual packages
00137             return []
00138         return [d[0].target_pkg.name for d in cur_ver.depends_list.get('Depends', []) +
00139                 cur_ver.depends_list.get('PreDepends', [])]
00140 
00141     def get_source(self, package):
00142         '''Return the source package name for a package.'''
00143 
00144         if self._apt_pkg(package).installed:
00145             return self._apt_pkg(package).installed.source_name
00146         elif self._apt_pkg(package).candidate:
00147             return self._apt_pkg(package).candidate.source_name
00148         else:
00149             raise ValueError('package %s does not exist' % package)
00150 
00151     def get_package_origin(self, package):
00152         '''Return package origin.
00153 
00154         Return the repository name from which a package was installed, or None
00155         if it cannot be determined.
00156 
00157         Throw ValueError if package is not installed.
00158         '''
00159         pkg = self._apt_pkg(package).installed
00160         if not pkg:
00161             raise ValueError('package is not installed')
00162         for origin in pkg.origins:
00163             if origin.origin:
00164                 return origin.origin
00165         return None
00166 
00167     def is_distro_package(self, package):
00168         '''Check if a package is a genuine distro package (True) or comes from
00169         a third-party source.'''
00170 
00171         lsb_release = subprocess.Popen(['lsb_release', '-i', '-s'],
00172                                        stdout=subprocess.PIPE)
00173         this_os = lsb_release.communicate()[0].decode().strip()
00174         assert lsb_release.returncode == 0
00175 
00176         pkg = self._apt_pkg(package)
00177         # some PPA packages have installed version None, see LP#252734
00178         if pkg.installed and pkg.installed.version is None:
00179             return False
00180 
00181         native_origins = [this_os]
00182         for f in glob.glob('/etc/apport/native-origins.d/*'):
00183             try:
00184                 with open(f) as fd:
00185                     for line in fd:
00186                         line = line.strip()
00187                         if line:
00188                             native_origins.append(line)
00189             except IOError:
00190                 pass
00191 
00192         if pkg.candidate and pkg.candidate.origins:  # might be None
00193             for o in pkg.candidate.origins:
00194                 if o.origin in native_origins:
00195                     return True
00196         return False
00197 
00198     def get_architecture(self, package):
00199         '''Return the architecture of a package.
00200 
00201         This might differ on multiarch architectures (e. g.  an i386 Firefox
00202         package on a x86_64 system)'''
00203 
00204         if self._apt_pkg(package).installed:
00205             return self._apt_pkg(package).installed.architecture or 'unknown'
00206         elif self._apt_pkg(package).candidate:
00207             return self._apt_pkg(package).candidate.architecture or 'unknown'
00208         else:
00209             raise ValueError('package %s does not exist' % package)
00210 
00211     def get_files(self, package):
00212         '''Return list of files shipped by a package.'''
00213 
00214         list = self._call_dpkg(['-L', package])
00215         if list is None:
00216             return None
00217         return [f for f in list.splitlines() if not f.startswith('diverted')]
00218 
    def get_modified_files(self, package):
        '''Return list of all modified files of a package.

        A file is reported as modified when its md5sum does not match the one
        in the package's md5sums file; only files with a timestamp newer than
        the package's dpkg .list file are checksummed. Return [] when the
        package ships no md5sums or its .list file cannot be stat'ed.
        '''
        # get the maximum mtime of package files that we consider unmodified
        listfile = '/var/lib/dpkg/info/%s:%s.list' % (package, self.get_system_architecture())
        if not os.path.exists(listfile):
            # fall back to the non-multiarch file name
            listfile = '/var/lib/dpkg/info/%s.list' % package
        try:
            s = os.stat(listfile)
            if not stat.S_ISREG(s.st_mode):
                raise OSError
            max_time = max(s.st_mtime, s.st_ctime)
        except OSError:
            return []

        # create a list of files with a newer timestamp for md5sum'ing
        sums = b''
        sumfile = '/var/lib/dpkg/info/%s:%s.md5sums' % (package, self.get_system_architecture())
        if not os.path.exists(sumfile):
            sumfile = '/var/lib/dpkg/info/%s.md5sums' % package
            if not os.path.exists(sumfile):
                # some packages do not ship md5sums
                return []

        with open(sumfile, 'rb') as fd:
            for line in fd:
                try:
                    # ignore lines with NUL bytes (happens, LP#96050)
                    if b'\0' in line:
                        apport.warning('%s contains NUL character, ignoring line', sumfile)
                        continue
                    words = line.split()
                    if not words:
                        apport.warning('%s contains empty line, ignoring line', sumfile)
                        continue
                    s = os.stat('/' + words[-1].decode('UTF-8'))
                    if max(s.st_mtime, s.st_ctime) <= max_time:
                        continue
                except OSError:
                    # deliberately fall through: files which cannot be
                    # stat'ed are still passed to md5sum below
                    pass

                sums += line

        if sums:
            return self._check_files_md5(sums)
        else:
            return []
00266 
00267     def get_modified_conffiles(self, package):
00268         '''Return modified configuration files of a package.
00269 
00270         Return a file name -> file contents map of all configuration files of
00271         package. Please note that apport.hookutils.attach_conffiles() is the
00272         official user-facing API for this, which will ask for confirmation and
00273         allows filtering.
00274         '''
00275         dpkg = subprocess.Popen(['dpkg-query', '-W', '--showformat=${Conffiles}',
00276                                  package], stdout=subprocess.PIPE)
00277 
00278         out = dpkg.communicate()[0].decode()
00279         if dpkg.returncode != 0:
00280             return {}
00281 
00282         modified = {}
00283         for line in out.splitlines():
00284             if not line:
00285                 continue
00286             # just take the first two fields, to not stumble over obsolete
00287             # conffiles
00288             path, default_md5sum = line.strip().split()[:2]
00289 
00290             if os.path.exists(path):
00291                 with open(path, 'rb') as fd:
00292                     contents = fd.read()
00293                 m = hashlib.md5()
00294                 m.update(contents)
00295                 calculated_md5sum = m.hexdigest()
00296 
00297                 if calculated_md5sum != default_md5sum:
00298                     modified[path] = contents
00299             else:
00300                 modified[path] = '[deleted]'
00301 
00302         return modified
00303 
00304     def __fgrep_files(self, pattern, file_list):
00305         '''Call fgrep for a pattern on given file list and return the first
00306         matching file, or None if no file matches.'''
00307 
00308         match = None
00309         slice_size = 100
00310         i = 0
00311 
00312         while not match and i < len(file_list):
00313             p = subprocess.Popen(['fgrep', '-lxm', '1', '--', pattern] +
00314                                  file_list[i:(i + slice_size)], stdin=subprocess.PIPE,
00315                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
00316             out = p.communicate()[0].decode('UTF-8')
00317             if p.returncode == 0:
00318                 match = out
00319             i += slice_size
00320 
00321         return match
00322 
00323     def get_file_package(self, file, uninstalled=False, map_cachedir=None):
00324         '''Return the package a file belongs to, or None if the file is not
00325         shipped by any package.
00326 
00327         If uninstalled is True, this will also find files of uninstalled
00328         packages; this is very expensive, though, and needs network access and
00329         lots of CPU and I/O resources. In this case, map_cachedir can be set to
00330         an existing directory which will be used to permanently store the
00331         downloaded maps. If it is not set, a temporary directory will be used.
00332         '''
00333         # check if the file is a diversion
00334         dpkg = subprocess.Popen(['/usr/sbin/dpkg-divert', '--list', file],
00335                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
00336         out = dpkg.communicate()[0].decode('UTF-8')
00337         if dpkg.returncode == 0 and out:
00338             pkg = out.split()[-1]
00339             if pkg != 'hardening-wrapper':
00340                 return pkg
00341 
00342         fname = os.path.splitext(os.path.basename(file))[0].lower()
00343 
00344         all_lists = []
00345         likely_lists = []
00346         for f in glob.glob('/var/lib/dpkg/info/*.list'):
00347             p = os.path.splitext(os.path.basename(f))[0].lower().split(':')[0]
00348             if p in fname or fname in p:
00349                 likely_lists.append(f)
00350             else:
00351                 all_lists.append(f)
00352 
00353         # first check the likely packages
00354         match = self.__fgrep_files(file, likely_lists)
00355         if not match:
00356             match = self.__fgrep_files(file, all_lists)
00357 
00358         if match:
00359             return os.path.splitext(os.path.basename(match))[0].split(':')[0]
00360 
00361         if uninstalled:
00362             return self._search_contents(file, map_cachedir)
00363         else:
00364             return None
00365 
00366     @classmethod
00367     def get_system_architecture(klass):
00368         '''Return the architecture of the system, in the notation used by the
00369         particular distribution.'''
00370 
00371         dpkg = subprocess.Popen(['dpkg', '--print-architecture'],
00372                                 stdout=subprocess.PIPE)
00373         arch = dpkg.communicate()[0].decode().strip()
00374         assert dpkg.returncode == 0
00375         assert arch
00376         return arch
00377 
00378     def get_library_paths(self):
00379         '''Return a list of default library search paths.
00380 
00381         The entries should be separated with a colon ':', like for
00382         $LD_LIBRARY_PATH. This needs to take any multiarch directories into
00383         account.
00384         '''
00385         dpkg = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
00386                                 stdout=subprocess.PIPE)
00387         multiarch_triple = dpkg.communicate()[0].decode().strip()
00388         assert dpkg.returncode == 0
00389 
00390         return '/lib/%s:/lib' % multiarch_triple
00391 
00392     def set_mirror(self, url):
00393         '''Explicitly set a distribution mirror URL for operations that need to
00394         fetch distribution files/packages from the network.
00395 
00396         By default, the mirror will be read from the system configuration
00397         files.'''
00398 
00399         self._mirror = url
00400 
    def get_source_tree(self, srcpackage, dir, version=None, sandbox=None,
                        apt_update=False):
        '''Download source package and unpack it into dir.

        This also has to care about applying patches etc., so that dir will
        eventually contain the actually compiled source. dir needs to exist and
        should be empty.

        If version is given, this particular version will be retrieved.
        Otherwise this will fetch the latest available version.

        If sandbox is given, it calls apt-get source in that sandbox, otherwise
        it uses the system apt configuration.

        If apt_update is True, it will call apt-get update before apt-get
        source. This is mostly necessary for freshly created sandboxes.

        Return the directory that contains the actual source root directory
        (which might be a subdirectory of dir). Return None if the source is
        not available.
        '''
        # configure apt for sandbox
        env = os.environ.copy()
        if sandbox:
            # NOTE: f must stay referenced until the apt-get calls below have
            # run; NamedTemporaryFile removes the file as soon as the object
            # is closed/garbage collected, which would invalidate APT_CONFIG
            f = tempfile.NamedTemporaryFile()
            f.write(('''Dir "%s";
Debug::NoLocking "true";
 ''' % sandbox).encode())
            f.flush()
            env['APT_CONFIG'] = f.name

        if apt_update:
            subprocess.call(['apt-get', '-qq', 'update'], env=env)

        # fetch source tree; apt-get source accepts pkg=version syntax
        argv = ['apt-get', '-qq', '--assume-yes', 'source', srcpackage]
        if version:
            argv[-1] += '=' + version
        try:
            if subprocess.call(argv, cwd=dir, env=env) != 0:
                return None
        except OSError:
            # e. g. apt-get is not installed
            return None

        # find top level directory; apt-get unpacks into <srcpackage>-<ver>
        root = None
        for d in glob.glob(os.path.join(dir, srcpackage + '-*')):
            if os.path.isdir(d):
                root = d
        assert root, 'could not determine source tree root directory'

        # apply patches on a best-effort basis; different packaging styles
        # use different rules targets, so try them all and ignore failures
        try:
            subprocess.call('(debian/rules patch || debian/rules apply-patches '
                            '|| debian/rules apply-dpatches || '
                            'debian/rules unpack || debian/rules patch-stamp || '
                            'debian/rules setup) >/dev/null 2>&1', shell=True, cwd=root)
        except OSError:
            pass

        return root
00462 
00463     def get_kernel_package(self):
00464         '''Return the actual Linux kernel package name.
00465 
00466         This is used when the user reports a bug against the "linux" package.
00467         '''
00468         # TODO: Ubuntu specific
00469         return 'linux-image-' + os.uname()[2]
00470 
00471     def _install_debug_kernel(self, report):
00472         '''Install kernel debug package
00473 
00474         Ideally this would be just another package but the kernel is
00475         special in various ways currently so we can not use the apt
00476         method.
00477         '''
00478         installed = []
00479         outdated = []
00480         kver = report['Uname'].split()[1]
00481         arch = report['Architecture']
00482         ver = report['Package'].split()[1]
00483         debug_pkgname = 'linux-image-debug-%s' % kver
00484         c = self._cache()
00485         if debug_pkgname in c and c[debug_pkgname].isInstalled:
00486             #print('kernel ddeb already installed')
00487             return (installed, outdated)
00488         target_dir = apt.apt_pkg.config.find_dir('Dir::Cache::archives') + '/partial'
00489         deb = '%s_%s_%s.ddeb' % (debug_pkgname, ver, arch)
00490         # FIXME: this package is currently not in Packages.gz
00491         url = 'http://ddebs.ubuntu.com/pool/main/l/linux/%s' % deb
00492         out = open(os.path.join(target_dir, deb), 'w')
00493         # urlretrieve does not return 404 in the headers so we use urlopen
00494         u = urlopen(url)
00495         if u.getcode() > 400:
00496             return ('', 'linux')
00497         while True:
00498             block = u.read(8 * 1024)
00499             if not block:
00500                 break
00501             out.write(block)
00502         out.flush()
00503         ret = subprocess.call(['dpkg', '-i', os.path.join(target_dir, deb)])
00504         if ret == 0:
00505             installed.append(deb.split('_')[0])
00506         return (installed, outdated)
00507 
00508     def install_packages(self, rootdir, configdir, release, packages,
00509                          verbose=False, cache_dir=None, permanent_rootdir=False):
00510         '''Install packages into a sandbox (for apport-retrace).
00511 
00512         In order to work without any special permissions and without touching
00513         the running system, this should only download and unpack packages into
00514         the given root directory, not install them into the system.
00515 
00516         configdir points to a directory with by-release configuration files for
00517         the packaging system; this is completely dependent on the backend
00518         implementation, the only assumption is that this looks into
00519         configdir/release/, so that you can use retracing for multiple
00520         DistroReleases. As a special case, if configdir is None, it uses the
00521         current system configuration, and "release" is ignored.
00522 
00523         release is the value of the report's 'DistroRelease' field.
00524 
00525         packages is a list of ('packagename', 'version') tuples. If the version
00526         is None, it should install the most current available version.
00527 
00528         If cache_dir is given, then the downloaded packages will be stored
00529         there, to speed up subsequent retraces.
00530 
00531         If permanent_rootdir is True, then the sandbox created from the
00532         downloaded packages will be reused, to speed up subsequent retraces.
00533 
00534         Return a string with outdated packages, or None if all packages were
00535         installed.
00536 
00537         If something is wrong with the environment (invalid configuration,
00538         package servers down, etc.), this should raise a SystemError with a
00539         meaningful error message.
00540         '''
00541         if not configdir:
00542             apt_sources = '/etc/apt/sources.list'
00543         else:
00544             apt_sources = os.path.join(configdir, release, 'sources.list')
00545         if not os.path.exists(apt_sources):
00546             raise SystemError('%s does not exist' % apt_sources)
00547 
00548         # create apt sandbox
00549         if cache_dir:
00550             tmp_aptroot = False
00551             if configdir:
00552                 aptroot = os.path.join(cache_dir, release, 'apt')
00553             else:
00554                 aptroot = os.path.join(cache_dir, 'system', 'apt')
00555             if not os.path.isdir(aptroot):
00556                 os.makedirs(aptroot)
00557         else:
00558             tmp_aptroot = True
00559             aptroot = tempfile.mkdtemp()
00560 
00561         if verbose:
00562             fetchProgress = apt.progress.text.AcquireProgress()
00563         else:
00564             fetchProgress = apt.progress.base.AcquireProgress()
00565         if not tmp_aptroot:
00566             c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
00567         else:
00568             self._build_apt_sandbox(aptroot, apt_sources)
00569             c = apt.Cache(rootdir=os.path.abspath(aptroot))
00570             try:
00571                 c.update(fetchProgress)
00572             except apt.cache.FetchFailedException as e:
00573                 raise SystemError(str(e))
00574             c.open()
00575 
00576         obsolete = ''
00577 
00578         # mark packages for installation
00579         real_pkgs = set()
00580         for (pkg, ver) in packages:
00581             try:
00582                 candidate = c[pkg].candidate
00583             except KeyError:
00584                 candidate = None
00585             if not candidate:
00586                 m = 'package %s does not exist, ignoring' % pkg
00587                 obsolete += m + '\n'
00588                 apport.warning(m)
00589                 continue
00590 
00591             if ver and candidate.version != ver:
00592                 w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
00593                 obsolete += w + '\n'
00594             real_pkgs.add(pkg)
00595 
00596             if permanent_rootdir:
00597                 mapping_path = os.path.join(cache_dir, release)
00598                 virtual_mapping = self._virtual_mapping(mapping_path)
00599                 # Remember all the virtual packages that this package provides,
00600                 # so that if we encounter that virtual package as a
00601                 # Conflicts/Replaces later, we know to remove this package from
00602                 # the cache.
00603                 for p in candidate.provides:
00604                     virtual_mapping.setdefault(p, set()).add(pkg)
00605                 conflicts = []
00606                 if 'Conflicts' in candidate.record:
00607                     conflicts += apt.apt_pkg.parse_depends(candidate.record['Conflicts'])
00608                 if 'Replaces' in candidate.record:
00609                     conflicts += apt.apt_pkg.parse_depends(candidate.record['Replaces'])
00610                 archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
00611                 for conflict in conflicts:
00612                     # apt_pkg.parse_depends needs to handle the or operator,
00613                     # but as policy states it is invalid to use that in
00614                     # Replaces/Depends, we can safely choose the first value
00615                     # here.
00616                     conflict = conflict[0]
00617                     if c.is_virtual_package(conflict[0]):
00618                         try:
00619                             providers = virtual_mapping[conflict[0]]
00620                         except KeyError:
00621                             # We may not have seen the virtual package that
00622                             # this conflicts with, so we can assume it's not
00623                             # unpacked into the sandbox.
00624                             continue
00625                         for p in providers:
00626                             debs = os.path.join(archives, '%s_*.deb' % p)
00627                             for path in glob.glob(debs):
00628                                 ver = self._deb_version(path)
00629                                 if apt.apt_pkg.check_dep(ver, conflict[2],
00630                                                               conflict[1]):
00631                                     os.unlink(path)
00632                         del providers
00633                     else:
00634                         debs = os.path.join(archives, '%s_*.deb' % conflict[0])
00635                         for path in glob.glob(debs):
00636                             ver = self._deb_version(path)
00637                             if apt.apt_pkg.check_dep(ver, conflict[2],
00638                                                           conflict[1]):
00639                                 os.unlink(path)
00640 
00641             if candidate.architecture != 'all':
00642                 if pkg + '-dbg' in c:
00643                     real_pkgs.add(pkg + '-dbg')
00644                 elif pkg + '-dbgsym' in c:
00645                     real_pkgs.add(pkg + '-dbgsym')
00646                     if c[pkg + '-dbgsym'].candidate.version != candidate.version:
00647                         obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
00648                             pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)
00649 
00650         for p in real_pkgs:
00651             c[p].mark_install(False, False)
00652 
00653         last_written = time.time()
00654         # fetch packages
00655         fetcher = apt.apt_pkg.Acquire(fetchProgress)
00656         try:
00657             c.fetch_archives(fetcher=fetcher)
00658         except apt.cache.FetchFailedException as e:
00659             apport.error('Package download error, try again later: %s', str(e))
00660             sys.exit(99)  # transient error
00661 
00662         # unpack packages
00663         if verbose:
00664             print('Extracting downloaded debs...')
00665         for i in fetcher.items:
00666             if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
00667                 subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
00668             real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])
00669 
00670         if tmp_aptroot:
00671             shutil.rmtree(aptroot)
00672 
00673         # check bookkeeping that apt fetcher really got everything
00674         assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
00675             + ' '.join(real_pkgs)
00676 
00677         if permanent_rootdir:
00678             self._save_virtual_mapping(mapping_path)
00679         return obsolete
00680 
00681     def package_name_glob(self, nameglob):
00682         '''Return known package names which match given glob.'''
00683 
00684         return glob.fnmatch.filter(self._cache().keys(), nameglob)
00685 
00686     #
00687     # Internal helper methods
00688     #
00689 
00690     @classmethod
00691     def _call_dpkg(klass, args):
00692         '''Call dpkg with given arguments and return output, or return None on
00693         error.'''
00694 
00695         dpkg = subprocess.Popen(['dpkg'] + args, stdout=subprocess.PIPE,
00696                                 stderr=subprocess.PIPE)
00697         out = dpkg.communicate(input)[0].decode('UTF-8')
00698         if dpkg.returncode == 0:
00699             return out
00700         else:
00701             raise ValueError('package does not exist')
00702 
00703     def _check_files_md5(self, sumfile):
00704         '''Internal function for calling md5sum.
00705 
00706         This is separate from get_modified_files so that it is automatically
00707         testable.
00708         '''
00709         if os.path.exists(sumfile):
00710             m = subprocess.Popen(['/usr/bin/md5sum', '-c', sumfile],
00711                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE,
00712                                  cwd='/', env={})
00713             out = m.communicate()[0].decode('UTF-8', errors='replace')
00714         else:
00715             assert type(sumfile) == bytes, 'md5sum list value must be a byte array'
00716             m = subprocess.Popen(['/usr/bin/md5sum', '-c'],
00717                                  stdin=subprocess.PIPE, stdout=subprocess.PIPE,
00718                                  stderr=subprocess.PIPE, cwd='/', env={})
00719             out = m.communicate(sumfile)[0].decode('UTF-8', errors='replace')
00720 
00721         # if md5sum succeeded, don't bother parsing the output
00722         if m.returncode == 0:
00723             return []
00724 
00725         mismatches = []
00726         for l in out.splitlines():
00727             if l.endswith('FAILED'):
00728                 mismatches.append(l.rsplit(':', 1)[0])
00729 
00730         return mismatches
00731 
00732     def _get_mirror(self):
00733         '''Return the distribution mirror URL.
00734 
00735         If it has not been set yet, it will be read from the system
00736         configuration.'''
00737 
00738         if not self._mirror:
00739             for l in open('/etc/apt/sources.list'):
00740                 fields = l.split()
00741                 if len(fields) >= 3 and fields[0] == 'deb' and fields[1].startswith('http://'):
00742                     self._mirror = fields[1]
00743                     break
00744             else:
00745                 raise SystemError('cannot determine default mirror: /etc/apt/sources.list does not contain a valid deb line')
00746 
00747         return self._mirror
00748 
00749     def _search_contents(self, file, map_cachedir):
00750         '''Internal function for searching file in Contents.gz.'''
00751 
00752         if map_cachedir:
00753             dir = map_cachedir
00754         else:
00755             if not self._contents_dir:
00756                 self._contents_dir = tempfile.mkdtemp()
00757             dir = self._contents_dir
00758 
00759         arch = self.get_system_architecture()
00760         map = os.path.join(dir, 'Contents-%s.gz' % arch)
00761 
00762         # check if map exists and is younger than a day; if not, we need to
00763         # refresh it
00764         try:
00765             st = os.stat(map)
00766             age = int(time.time() - st.st_mtime)
00767         except OSError:
00768             age = None
00769 
00770         if age is None or age >= 86400:
00771             # determine distro release code name
00772             lsb_release = subprocess.Popen(['lsb_release', '-sc'],
00773                                            stdout=subprocess.PIPE)
00774             release_name = lsb_release.communicate()[0].decode('UTF-8').strip()
00775             assert lsb_release.returncode == 0
00776 
00777             url = '%s/dists/%s/Contents-%s.gz' % (self._get_mirror(), release_name, arch)
00778 
00779             src = urlopen(url)
00780             with open(map, 'wb') as f:
00781                 while True:
00782                     data = src.read(1000000)
00783                     if not data:
00784                         break
00785                     f.write(data)
00786             src.close()
00787             assert os.path.exists(map)
00788 
00789         if file.startswith('/'):
00790             file = file[1:]
00791 
00792         # zgrep is magnitudes faster than a 'gzip.open/split() loop'
00793         package = None
00794         zgrep = subprocess.Popen(['zgrep', '-m1', '^%s[[:space:]]' % file, map],
00795                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
00796         out = zgrep.communicate()[0].decode('UTF-8')
00797         # we do not check the return code, since zgrep -m1 often errors out
00798         # with 'stdout: broken pipe'
00799         if out:
00800             package = out.split()[1].split(',')[0].split('/')[-1]
00801 
00802         return package
00803 
00804     @classmethod
00805     def _build_apt_sandbox(klass, apt_root, apt_sources):
00806         # pre-create directories, to avoid apt.Cache() printing "creating..."
00807         # messages on stdout
00808         if not os.path.exists(os.path.join(apt_root, 'var', 'lib', 'apt')):
00809             os.makedirs(os.path.join(apt_root, 'var', 'lib', 'apt', 'lists', 'partial'))
00810             os.makedirs(os.path.join(apt_root, 'var', 'cache', 'apt', 'archives', 'partial'))
00811             os.makedirs(os.path.join(apt_root, 'var', 'lib', 'dpkg'))
00812             os.makedirs(os.path.join(apt_root, 'etc', 'apt', 'apt.conf.d'))
00813             os.makedirs(os.path.join(apt_root, 'etc', 'apt', 'preferences.d'))
00814 
00815         # install apt sources
00816         list_d = os.path.join(apt_root, 'etc', 'apt', 'sources.list.d')
00817         if os.path.exists(list_d):
00818             shutil.rmtree(list_d)
00819         if os.path.isdir(apt_sources + '.d'):
00820             shutil.copytree(apt_sources + '.d', list_d)
00821         else:
00822             os.makedirs(list_d)
00823         with open(apt_sources) as src:
00824             with open(os.path.join(apt_root, 'etc', 'apt', 'sources.list'), 'w') as dest:
00825                 dest.write(src.read())
00826 
00827         # install apt keyrings; prefer the ones from the config dir, fall back
00828         # to system
00829         trusted_gpg = os.path.join(os.path.dirname(apt_sources), 'trusted.gpg')
00830         if os.path.exists(trusted_gpg):
00831             shutil.copy(trusted_gpg, os.path.join(apt_root, 'etc', 'apt'))
00832         elif os.path.exists('/etc/apt/trusted.gpg'):
00833             shutil.copy('/etc/apt/trusted.gpg', os.path.join(apt_root, 'etc', 'apt'))
00834 
00835         trusted_d = os.path.join(apt_root, 'etc', 'apt', 'trusted.gpg.d')
00836         if os.path.exists(trusted_d):
00837             shutil.rmtree(trusted_d)
00838 
00839         if os.path.exists(trusted_gpg + '.d'):
00840             shutil.copytree(trusted_gpg + '.d', trusted_d)
00841         elif os.path.exists('/etc/apt/trusted.gpg.d'):
00842             shutil.copytree('/etc/apt/trusted.gpg.d', trusted_d)
00843         else:
00844             os.makedirs(trusted_d)
00845 
00846     @classmethod
00847     def _deb_version(klass, pkg):
00848         '''Return the version of a .deb file'''
00849 
00850         dpkg = subprocess.Popen(['dpkg-deb', '-f', pkg, 'Version'], stdout=subprocess.PIPE)
00851         out = dpkg.communicate(input)[0].decode('UTF-8')
00852         assert dpkg.returncode == 0
00853         assert out
00854         return out
00855 
    def compare_versions(self, ver1, ver2):
        '''Compare two package versions using Debian version comparison rules.

        Return a negative value for ver1 < ver2, 0 for ver1 == ver2, and a
        positive value for ver1 > ver2.

        NOTE(review): python-apt's version_compare is documented to return a
        value with the right sign, not necessarily exactly -1/0/1 — callers
        should compare the result against 0.'''

        return apt.apt_pkg.version_compare(ver1, ver2)
00862 
00863     def enabled(self):
00864         '''Return whether Apport should generate crash reports.
00865 
00866         Signal crashes are controlled by /proc/sys/kernel/core_pattern, but
00867         some init script needs to set that value based on a configuration file.
00868         This also determines whether Apport generates reports for Python,
00869         package, or kernel crashes.
00870 
00871         Implementations should parse the configuration file which controls
00872         Apport (such as /etc/default/apport in Debian/Ubuntu).
00873         '''
00874 
00875         try:
00876             with open(self.configuration) as f:
00877                 conf = f.read()
00878         except IOError:
00879             # if the file does not exist, assume it's enabled
00880             return True
00881 
00882         return re.search('^\s*enabled\s*=\s*0\s*$', conf, re.M) is None
00883 
00884 impl = __AptDpkgPackageInfo()