[Merge] lp:~mwhudson/ubuntu-archive-scripts/find-rcbuggy-problem-packages into lp:ubuntu-archive-scripts
Michael Hudson-Doyle
mwhudsonlp at fastmail.fm
Mon Sep 2 02:32:11 UTC 2019
The output looks like this fwiw: https://paste.ubuntu.com/p/6NC4VPMJTw/
Diff comments:
> === added file 'find-rcbuggy-problem-packages'
> --- find-rcbuggy-problem-packages 1970-01-01 00:00:00 +0000
> +++ find-rcbuggy-problem-packages 2019-09-02 02:30:25 +0000
> @@ -0,0 +1,195 @@
> +#!/usr/bin/python3
> +
> +# This script highlights packages that are being kept out of Debian
> +# testing by release critical bugs whose removal from the Ubuntu devel
> +# series would aid the migration of other packages out of proposed.
> +#
> +# The packages that are being kept out of Debian testing by release
> +# critical bugs can be found fairly easily by looking at the output
> +# from Debian's britney runs. We do this first (and call these
> +# "rc-gone packages").
> +#
> +# Such packages can inhibit the migration of other packages in two
> +# ways:
> +#
> +# 1) autopkgtest regressions
> +# 2) by becoming uninstallable
> +#
> +# The first is fairly easy to find: scan through Ubuntu's excuses.yaml
> +# and look for the rc-gone package in the list of autopkgtest
> +# regressions for any package.
> +#
> +# The second is a bit more mind-bending to detect. If the package is
> +# caught up in a transition, a rebuild will be attempted as a matter
> +# of course, and if this succeeds and passes its autopkgtests then
> +# removing the package will not aid proposed migration. So we look for
> +# rc-gone packages in proposed that are missing builds or failing
> +# autopkgtests. For such source packages, we see if any of their
> +# binary packages are reported as being made uninstallable by the
> +# migration of any source package in proposed. If so, removing the
> +# rc-gone package will help proposed migration.
> +
> +import argparse
> +import glob
> +import os
> +import shlex
> +import sys
> +import subprocess
> +
> +import attr
> +
> +import yaml
> +
> +def parse_args():
> + parser = argparse.ArgumentParser()
> + parser.add_argument('--ubuntu-excuses', action='store')
> + parser.add_argument('--ubuntu-update_output', action='store')
> + parser.add_argument('--debian-excuses', action='store')
> + return parser.parse_args()
> +
> +args = parse_args()
> +
> +def run_output(*cmd, **extra):
> + kw = dict(check=True, encoding='ascii', stdout=subprocess.PIPE)
> + kw.update(extra)
> + cp = subprocess.run(cmd, **kw)
> + return cp.stdout.strip()
> +
> +def extract_uninstallable_to_src_pkg(output_fp):
> + # Extract a mapping from binary package name to the set of source
> + # packages whose migration from proposed makes that package
> + # uninstallable.
> +
> + # We're looking for sequences of lines like this:
> +
> + # skipped: camlp5 (0, 3, 57)
> + # got: 13+0: a-1:a-0:a-0:i-6:p-0:s-6
> + # * s390x: hol-light, libaac-tactics-ocaml-dev, libcoq-ocaml-dev, libledit-ocaml-dev, ocaml-ulex08
> +
> + # (Britney tries to migrate batches of packages but it always
> + # tries each package on its own as well).
> +
> + r = {}
> + srcpkg = None
> + for line in output_fp:
> + parts = line.split()
> + if len(parts) >= 2:
> + if parts[0] == 'skipped:':
> + srcpkg = None
> +                # If parts[2] starts with '(' then this line is about trying to
> + # migrate a single package, which is what we are
> + # looking for.
> + if parts[2].startswith('('):
> + srcpkg = parts[1]
> + if srcpkg is not None and parts[0] == '*':
> + # parts[1] is "${arch}:"
> + # parts[2:] is a comma+space separated list of binary package names.
> + for binpkg in parts[2:]:
> + binpkg = binpkg.strip(',')
> + r.setdefault(binpkg, set()).add(srcpkg)
> + return r
> +
> +with open(args.ubuntu_excuses) as fp:
> + ubuntu_excuses = yaml.load(fp, Loader=yaml.CSafeLoader)
> +with open(args.ubuntu_update_output) as fp:
> + uninstallable_to_src_pkg = extract_uninstallable_to_src_pkg(fp)
> +with open(args.debian_excuses) as fp:
> + debian_excuses = yaml.load(fp, Loader=yaml.CSafeLoader)
> +
> + at attr.s
> +class RCGone:
> + source_package_name = attr.ib()
> + bugs = attr.ib()
> + block_by_regression = attr.ib(default=attr.Factory(set))
> + block_by_uninstallability = attr.ib(default=attr.Factory(list))
> + suites = attr.ib(default=attr.Factory(set))
> +
> +rc_gones = {}
> +
> +for source in debian_excuses['sources']:
> + if source['old-version'] == '-':
> + info = source['policy_info']
> + rc_bugs = info.get('rc-bugs', {})
> + if rc_bugs.get('verdict') == "REJECTED_PERMANENTLY":
> + bugs = []
> + for k in 'shared-bugs', 'unique-source-bugs', 'unique-target-bugs':
> + bugs.extend(rc_bugs[k])
> + rc_gones[source['item-name']] = RCGone(
> + source_package_name=source['item-name'],
> + bugs=bugs)
> +
> +in_proposed_by_autopkgtest_or_missing_binaries = set()
> +
> +if 'SERIES' in os.environ:
> + series = os.environ['SERIES']
> +else:
> + series = run_output('distro-info', '-d')
> +
> +for source in ubuntu_excuses['sources']:
> + item = source['item-name']
> + if 'autopkgtest' in source['reason']:
> + in_proposed_by_autopkgtest_or_missing_binaries.add(item)
> + for package, results in sorted(source['policy_info']['autopkgtest'].items()):
> + package = package.split('/')[0]
> + if package not in rc_gones:
> + continue
> + for arch, result in sorted(results.items()):
> + outcome, log, history, wtf1, wtf2 = result
> + if outcome == "REGRESSION":
> + rc_gones[package].block_by_regression.add(item)
> + break
> + if 'missing-builds' in source:
> + in_proposed_by_autopkgtest_or_missing_binaries.add(item)
> + if item in rc_gones:
> + if source['new-version'] != '-':
> + rc_gones[item].suites.add(series+"-proposed")
> + if source['old-version'] != '-':
> + rc_gones[item].suites.add(series)
> +
> +for pkg in rc_gones:
> + if pkg not in in_proposed_by_autopkgtest_or_missing_binaries:
> + continue
> + bin_pkgs = run_output("chdist", "grep-dctrl-packages", series, "-SwnsPackage", pkg, check=False)
Using chdist grep-dctrl-packages here is a touch gross I guess but I couldn't very quickly see how to do this with the python apt packages :/
> + for bin_pkg in set(bin_pkgs.splitlines()):
> + rc_gones[pkg].block_by_uninstallability.extend(uninstallable_to_src_pkg.get(bin_pkg, []))
> +
> +for _, rc_gone in sorted(rc_gones.items()):
> + if not rc_gone.block_by_regression and not rc_gone.block_by_uninstallability:
> + continue
> +
> + comment = "removed from testing (Debian "
> + if len(rc_gone.bugs) == 1:
> + comment += "bug #" + str(rc_gone.bugs[0])
> + else:
> + comment += "bugs " + ', '.join('#' + str(bug) for bug in rc_gone.bugs)
> + comment += ")"
> + if rc_gone.block_by_regression:
> + comment += ', blocks {} by regression'.format(', '.join(sorted(rc_gone.block_by_regression)))
> + if rc_gone.block_by_uninstallability:
> + comment += ', blocks {} by uninstallability'.format(', '.join(sorted(rc_gone.block_by_uninstallability)))
> +
> + rdeps = run_output("reverse-depends", 'src:' + rc_gone.source_package_name)
> + rbdeps = run_output("reverse-depends", '-b', 'src:' + rc_gone.source_package_name)
> +
> + if rdeps == 'No reverse dependencies found':
> + rdeps = ''
> + if rbdeps == 'No reverse dependencies found':
> + rbdeps = ''
> +
> + if rdeps or rbdeps:
> + print(rc_gone.source_package_name, 'has rdeps:')
> + if rdeps and rbdeps:
> + rdeps_lines = (rdeps + '\n' + rbdeps).splitlines()
> + elif rdeps:
> + rdeps_lines = rdeps.splitlines()
> + else:
> + rdeps_lines = rbdeps.splitlines()
> + if len(rdeps_lines) > 10:
> + rdeps_lines = rdeps_lines[:10] + ['... ({} more lines)'.format(len(rdeps_lines)-10)]
> + print('\n'.join(rdeps_lines))
> + prefix = '#'
> + else:
> + prefix = ''
> + for suite in sorted(rc_gone.suites):
> + print(prefix + "remove-package", "-s", suite, "-m", shlex.quote(comment), rc_gone.source_package_name)
> + print()
--
https://code.launchpad.net/~mwhudson/ubuntu-archive-scripts/find-rcbuggy-problem-packages/+merge/372119
Your team Ubuntu Package Archive Administrators is requested to review the proposed merge of lp:~mwhudson/ubuntu-archive-scripts/find-rcbuggy-problem-packages into lp:ubuntu-archive-scripts.
More information about the ubuntu-archive
mailing list