[Merge] lp:~cyphermox/ubuntu-archive-tools/what-next into lp:ubuntu-archive-tools

Steve Langasek steve.langasek at canonical.com
Thu Feb 22 04:38:16 UTC 2018


drive-by comments from me as well :)

Diff comments:

> === added file 'what_next.py'
> --- what_next.py	1970-01-01 00:00:00 +0000
> +++ what_next.py	2018-02-21 21:45:37 +0000
> @@ -0,0 +1,535 @@
> +#!/usr/bin/python3
> +# -*- coding: utf-8 -*-
> +
> +# Copyright (C) 2018  Canonical Ltd.
> +# Author: Mathieu Trudel-Lapierre <cyphermox at ubuntu.com>
> +
> +# This program is free software: you can redistribute it and/or modify
> +# it under the terms of the GNU General Public License as published by
> +# the Free Software Foundation; version 3 of the License.
> +#
> +# This program is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
> +# GNU General Public License for more details.
> +#
> +# You should have received a copy of the GNU General Public License
> +# along with this program.  If not, see <http://www.gnu.org/licenses/>.
> +
> +"""Analyze britney's excuses output and suggest a course of action for
> +proposed migration.
> +"""
> +
> +# FIXME: Refactor this majorly
> +#
> +# FIXME: Various parts of slangasek's pseudocode (in comments where relevant)
> +#        are not well implemented.
> +#
> +# TODO: finish moving things from requests to urllib; no sense in using two
> +#       different modules for URL retrieval.
> +
> +import yaml
> +import sys
> +import os
> +import time
> +import subprocess
> +import argparse
> +import requests
> +import tempfile
> +
> +from enum import Enum
> +
> +from urllib.request import FancyURLopener
> +
> +from launchpadlib.launchpad import Launchpad
> +
> +DEBIAN_CURRENT_SERIES = 'unstable'
> +MAX_CACHE_AGE = 14400   # excuses cache should not be older than 4 hours
> +
> +lp_cachedir = os.path.expanduser(os.path.join("~", ".launchpadlib/cache"))
> +lp = Launchpad.login_anonymously('what-next', 'production', lp_cachedir, version='devel')
> +
> +ubuntu = lp.distributions["ubuntu"]
> +archive = ubuntu.main_archive
> +series = ubuntu.current_series
> +
> +excuses = {}
> +
> +level = 0
> +seen = []
> +
> +class Message(Enum):
> +
> +  NONE = 1
> +  PASS = 2
> +  FAIL = 3
> +  INFO = 4
> +
> +  @staticmethod
> +  def lprint(level, state, text, **kwargs):
> +    if state is Message.NONE:
> +      state = ''
> +    elif state is Message.PASS:
> +      state = "\033[92m✔\033[0m"
> +    elif state is Message.FAIL:
> +      state = "\033[91m✘\033[0m"
> +    elif state is Message.INFO:
> +      state = "\033[94m\033[0m"
> +
> +    if level == 0:
> +      msg = "{} {}".format(state,
> +                           text)
> +    else:
> +      msg = "{} {} {}".format(" " * level,
> +                              state,
> +                              text)

This gives every level below the first only one extra character of indent, which seems inconsistent.  It looks good for levels 0 and 1 because the first character is double-wide, but after that each subsequent level effectively gets only a half-indent.  I think this could be condensed to:

    msg = "{}{} {}".format(" " * level * 2,
                           state,
                           text)

> +    print(msg, **kwargs)
> +
> +
> +def report_download(blocks_read, block_size, total_size):
> +    size_read = blocks_read * block_size
> +    percent = size_read/total_size*100
> +    print("%.0f %%" % (percent), end='\r')
> +
> +def get_debian_ci_results(source, arch):
> +  try:
> +    url = "https://ci.debian.net/data/packages/unstable/{}/{}/latest.json"
> +    results_url = url.format("amd64", get_pkg_archive_path(source))
> +    resp = requests.get(url=results_url)
> +    return resp.json()
> +  except:
> +    return None
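
The arch parameter is accepted but then ignored; "amd64" is hardcoded into the URL.  That's harmless today, since the only caller passes "amd64", but it will surprise someone later.  The bare except: also swallows everything up to and including KeyboardInterrupt.  Perhaps:

    def get_debian_ci_results(source, arch):
      try:
        url = "https://ci.debian.net/data/packages/unstable/{}/{}/latest.json"
        results_url = url.format(arch, get_pkg_archive_path(source))
        return requests.get(url=results_url).json()
      except (requests.RequestException, ValueError):
        return None
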
> +
> +def find_excuses(src):
> +  for item in excuses['sources']:
> +    item_name = item.get('item-name')
> +    if item_name == src:
> +      process(item)
> +
> +def get_pkg_archive_path(package):
> +  try:
> +    path = subprocess.check_output("apt-cache show %s | grep Filename:" % package, shell=True)
> +    path = path.decode('utf-8')
> +    path = path.split(' ')[1].split('/')
> +    path = "/".join(path[2:4])
> +    return path
> +  except:
> +    return None
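
Interpolating the package name into a shell=True command line is asking for trouble with unusual input.  Doing the grep in Python avoids both the shell and the bare except: (a sketch):

    def get_pkg_archive_path(package):
      try:
        output = subprocess.check_output(["apt-cache", "show", package])
      except subprocess.CalledProcessError:
        return None
      for line in output.decode('utf-8').splitlines():
        if line.startswith("Filename:"):
          path = line.split(' ')[1].split('/')
          return "/".join(path[2:4])
      return None
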
> +
> +def get_source_package(binary):
> +  source = None
> +  try:
> +    source = subprocess.check_output("apt-cache show %s | grep Source:" % binary, shell=True)
> +  except subprocess.CalledProcessError as e:
> +    source = subprocess.check_output("apt-cache show %s | grep Package:" % binary, shell=True)
> +
> +  if source is not None:
> +    if source.startswith(b"Source:") or source.startswith(b"Package:"):
> +      name = source.decode('utf-8').split()[1]
> +      return name
> +
> +  return None
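
If the binary is unknown to apt-cache entirely, the second check_output() raises CalledProcessError, which only the bare excepts in process_unsatisfiable_depends() happen to catch.  Also, grep matches "Source:" anywhere in a line, so a package description containing that string would confuse this; anchoring the match (grep '^Source:'), or parsing in Python as suggested above, would be safer.
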
> +
> +def package_exists_in_distro(package, distro='ubuntu', proposed=False):
> +
> +  distroseries = series.name
> +  if distro == 'debian':
> +    distroseries = DEBIAN_CURRENT_SERIES
> +  if proposed:
> +    distroseries += "-proposed"
> +
> +  url = "https://qa.debian.org/cgi-bin/madison.cgi?package={}&table={}&a=&c=&s={}".format(package, distro, distroseries)
> +  resp = requests.get(url=url)
> +
> +  for line in resp.text.split('\n'):
> +    if not line.startswith(" {}".format(package)):
> +      continue
> +    if distroseries in line:
> +      return True
> +  return False
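
This is one HTTP round trip to madison per call, and process_unsatisfiable_depends() below can call it three times per unsatisfied dependency.  The arguments are hashable and the answer won't change within a run, so memoizing it is nearly free:

    import functools

    @functools.lru_cache(maxsize=None)
    def package_exists_in_distro(package, distro='ubuntu', proposed=False):
        ...
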
> +
> +def lp_build_results(source_name, arch):
> +  global level
> +  # TODO: Process missing builds; suggest options
> +  #
> +  #  - missing build on $arch / has no binaries on any arch
> +  #    - is this an architecture-specific build failure?
> +  #    - has Debian removed the binaries for this architecture?
> +  #    - ask AA to remove the binaries as ANAIS
> +  #  - else
> +  #    - try to fix
> +  #
> +  #  - is this a build failure on all archs?
> +  #    - are there bugs filed about this failure in Debian?
> +  #      - is the package in sync with Debian and does the package require sourceful changes to fix?
> +  #        - remove from -proposed
> +  #
> +  #  - does the package fail to build in Debian?
> +  #    - file a bug in Debian
> +  #    - is the package in sync with Debian and does the package require sourceful changes to fix?
> +  #      - remove from -proposed
> +  #
> +  #  - is this a dep-wait?
> +  #    - does this package have this build-dependency in Debian?
> +  #      - is this an architecture-specific dep-wait?
> +  #        - has Debian removed the binaries for this architecture?
> +  #          - ask AA to remove the binaries as ANAIS
> +  #        - else
> +  #          - try to fix
> +  #      - does this binary package exist in Debian?
> +  #        - look what source package provides this binary package in Debian
> +  #        - is this source package ftbfs or dep-wait in -proposed?
> +  #          - recurse
> +  #        - else
> +  #          - is this source package on the sync blacklist?
> +  #            - file a bug with the Ubuntu package
> +  #          - else
> +  #            - fix by syncing or merging the source
> +  #      - else
> +  #        - make sure a bug is filed in Debian about the issue
> +  #        - was the depended-on package removed from Debian, and is this a sync?
> +  #          - ask AA to remove the package from -proposed
> +  #        - else
> +  #          - leave the package in -proposed
> +
> +  level += 1
> +
> +  release = "bionic"
> +
> +  spph = archive.getPublishedSources(exact_match=True, source_name=source_name, distro_series=series, pocket="Proposed", order_by_date=True)[0]
> +  builds = spph.getBuilds()
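
getPublishedSources() returns an empty collection for a typoed or never-published source name, so the [0] will raise IndexError.  The release = "bionic" just above is also unused (and hardcodes what the script otherwise derives from ubuntu.current_series).  Maybe something like:

    spphs = archive.getPublishedSources(exact_match=True, source_name=source_name,
                                        distro_series=series, pocket="Proposed",
                                        order_by_date=True)
    if not spphs:
      level -= 1
      return False
    builds = spphs[0].getBuilds()
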
> +
> +  any_failure = False
> +  for build in builds:
> +    if arch is None or (arch == build.arch_tag):
> +      Message.lprint(level, Message.FAIL, "{} is missing a build on {}:".format(source_name, build.arch_tag))
> +      Message.lprint(level+1, Message.NONE, "[%s] %s" % (build.buildstate, build.build_log_url))
> +      if not "Successfully" in build.buildstate:
> +        failed = True
> +        any_failure = failed if any_failure is False else True
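
This prints the FAIL line for every build record on a matching arch, including the successful ones; the buildstate check only happens afterwards.  And any_failure = failed if any_failure is False else True is a roundabout way of writing any_failure = True.  I think this loop wants to be:

    for build in builds:
      if arch is None or arch == build.arch_tag:
        if "Successfully" not in build.buildstate:
          any_failure = True
          Message.lprint(level, Message.FAIL, "{} is missing a build on {}:".format(source_name, build.arch_tag))
          Message.lprint(level+1, Message.NONE, "[%s] %s" % (build.buildstate, build.build_log_url))
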
> +
> +  level -= 1
> +
> +  return any_failure
> +
> +def process_unsatisfiable_depends(source):
> +  global level
> +
> +  affected_sources = []
> +
> +  level += 1
> +
> +  unsatisfiable = []
> +  for excuse in source.get('excuses'):
> +    if 'unsatisfiable' in excuse:
> +      eline = excuse.split(' ')[2:]
> +      unsatisfiable.append(eline)
> +      try:
> +        pkg = get_source_package(eline[1])
> +        if pkg not in affected_sources:
> +          affected_sources.append(pkg)
> +      except:
> +        try:
> +          if package_exists_in_distro(eline[1], distro='ubuntu'):
> +            affected_sources.append(eline[1])
> +        except:
> +          if package_exists_in_distro(eline[1], distro='ubuntu', proposed=True):
> +            affected_sources.append(eline[1])
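
get_source_package() can return None here, and None then ends up in affected_sources and eventually gets passed to find_excuses().  A guard on the append would avoid that:

    pkg = get_source_package(eline[1])
    if pkg is not None and pkg not in affected_sources:
      affected_sources.append(pkg)
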
> +
> +  if len(affected_sources) < 1 and len(unsatisfiable) < 1:
> +    level -= 1
> +    return
> +
> +  Message.lprint(level, Message.NONE, "Fix unsatisfiable dependencies in {}:".format(source.get('source')))
> +
> +  # TODO: Check for component-mismatches.
> +  # - is the unsatisfied dependency due to a component mismatch?
> +  #  http://people.canonical.com/~ubuntu-archive/component-mismatches-proposed.svg
> +  #  - is there an MIR?
> +  #    - has the MIR been rejected?
> +  #      - upload or submit to sponsorship queue to drop the dependency
> +  #  - else
> +  #    - file an MIR
> +
> +  # TODO: Check version comparisons for removal requests/fixes
> +  # - is the unsatisfied dependency due to a package dropped in Ubuntu but not
> +  #   Debian, which may come back as a sync later (i.e. not blacklisted)?
> +  #   - leave in -proposed
> +  # - is this package Ubuntu-specific?
> +  #   - is there an open bug in launchpad about this issue, with no action?
> +  #     - subscribe ubuntu-archive and request the package's removal
> +  #   - else
> +  #     - open a bug report and assign to the package's maintainer
> +  # - is the package in Debian, but the dependency is part of Ubuntu delta?
> +  #   - fix
> +
> +  for depends in unsatisfiable:
> +    in_archive = False
> +    in_proposed = False
> +    in_debian = False
> +
> +    Message.lprint(level+1, Message.FAIL, "{} can not be satisfied.".format(" ".join(depends)))
> +    if package_exists_in_distro(depends[1], distro='ubuntu'):
> +      in_archive = True
> +    if package_exists_in_distro(depends[1], distro='ubuntu', proposed=True):
> +      in_proposed = True
> +    if package_exists_in_distro(depends[1], distro='debian'):
> +      in_debian = True
> +
> +    if in_archive and not in_proposed:
> +      Message.lprint(level+2, Message.FAIL, "{} exists in the Ubuntu primary archive.".format(depends[1]))
> +      Message.lprint(level+2, Message.INFO, "Is the right version available?")
> +    elif not in_archive and in_proposed:
> +      Message.lprint(level+2, Message.FAIL, "{} is only in -proposed.".format(depends[1]))
> +      Message.lprint(level+2, Message.INFO, "Has this package been dropped in Ubuntu, but not in Debian?")
> +    elif not in_archive and not in_proposed and in_debian:
> +      Message.lprint(level+2, Message.FAIL, "{} only exists in Debian.".format(depends[1]))
> +      Message.lprint(level+2, Message.INFO, "Is this package blacklisted? Should it be synced?")
> +    elif not in_archive and not in_proposed and not in_debian:
> +      Message.lprint(level+2, Message.FAIL, "{} is not found.".format(depends[1]))
> +      Message.lprint(level+2, Message.INFO, "Has this package been removed?")
> +      Message.lprint(level+2, Message.INFO, "Consider filing a bug for the removal of {}".format(source.get('source')))
> +    else:
> +      Message.lprint(level+2, Message.INFO, "+++ This case is not implemented: +++")
> +      Message.lprint(level+2, Message.INFO, "in_archive: {}".format(in_archive))
> +      Message.lprint(level+2, Message.INFO, "in_proposed: {}".format(in_proposed))
> +      Message.lprint(level+2, Message.INFO, "in_debian: {}".format(in_debian))
> +
> +  if len(affected_sources) > 0:
> +    for src in affected_sources:
> +      find_excuses(src)
> +
> +  level -= 1
> +  
> +def process_autopkgtest(source):
> +  global level
> +
> +  autopkgtests = source.get('policy_info').get('autopkgtest')
> +
> +  level += 1
> +
> +  Message.lprint(level, Message.NONE, "Fix autopkgtests triggered by this package for:")
> +
> +  for key in autopkgtests.keys():
> +    test = autopkgtests[key]
> +    for arch in test.keys():
> +      arch_test = test[arch]
> +      if 'REGRESSION' in arch_test:
> +        Message.lprint(level+1, Message.FAIL, "{} {} {}".format(key, arch, arch_test[2]))
> +        # lprint(level+2, None, arch_test[1])
> +        if arch == "amd64":
> +          if '/' in key:
> +            pkgname = key.split('/')[0]
> +          else:
> +            pkgname = key
> +          ci_results = get_debian_ci_results(pkgname, "amd64")
> +          if ci_results is not None:
> +            result = ci_results.get('status')
> +            Message.lprint(level+2, Message.PASS if result == 'pass' else Message.FAIL, "CI tests {} in Debian".format(result))
> +            if result != 'pass':
> +              Message.lprint(level+3, Message.INFO, "Consider filing a bug (usertag: autopkgtest) in Debian if none exist.")
> +            else:
> +              # TODO: (cyphermox) detect this case? check versions?
> +              Message.lprint(level+3, Message.INFO, "If synced from Debian and requires sourceful changes to the package, "
> +                                    " file a bug for removal from -proposed.")
> +
> +  level -= 1
> +
> +
> +def process_blocking(source):
> +  global level
> +
> +  bugs = source.get('policy_info').get('block-bugs')
> +
> +  level += 1
> +
> +  Message.lprint(level, Message.NONE, "Resolve blocking bugs:")
> +
> +  for bug in bugs.keys():
> +    lp_bug = lp.bugs[bug]
> +    Message.lprint(level+1, Message.NONE, "[LP: #{}] {} {}".format(lp_bug.id, lp_bug.title, lp_bug.web_link))
> +    tasks = lp_bug.bug_tasks
> +    for task in tasks:
> +      ok = Message.FAIL
> +      if 'Fix Released' in task.status or 'Fix Committed' in task.status:
> +        ok = Message.PASS
> +      elif 'Invalid' in task.status or "Won't Fix" in task.status:
> +        continue
> +      Message.lprint(level+2, ok, "{}({}) in {}".format(task.status, task.importance, task.bug_target_display_name))
> +
> +    # guesstimate whether this is a removal request
> +    if 'emove {}'.format(source.get('source')) in lp_bug.title:
> +      Message.lprint(level+2, Message.INFO, "This looks like a removal request. Consider pinging #ubuntu-release for processing.")
> +
> +  hints = source.get('hints', None)
> +  if hints is not None:
> +    Message.lprint(level, Message.NONE, "Update manual hinting (contact #ubuntu-release):")
> +    for excuse in source.get('excuses'):
> +      if 'block request' not in excuse:
> +        continue
> +      Message.lprint(level+1, Message.INFO, excuse)
> +  
> +
> +  level -= 1
> +
> +def process_dependencies(source):
> +  global level
> +
> +  dependencies = source.get('dependencies')
> +  blocked_by = dependencies.get('blocked-by', None)
> +  migrate_after = dependencies.get('migrate-after', None)
> +
> +  level += 1
> +
> +  Message.lprint(level, Message.NONE, "Clear outstanding promotion interdependencies:")
> +
> +  if migrate_after is not None:
> +    Message.lprint(level+1, Message.FAIL, "{} will migrate after {}".format(source.get('source'),
> +                                                                            ", ".join(migrate_after)))
> +    Message.lprint(level+2, Message.INFO, "Investigate what packages are conflicting, by looking at "
> +                                          "'Trying easy on autohinter' lines in update_output.txt for "
> +                                          "{}".format(source.get('source')))
> +    Message.lprint(level+2, Message.INFO, "See http://people.canonical.com/~ubuntu-archive/proposed-migration/update_output.txt")
> +
> +  if blocked_by is not None:
> +    Message.lprint(level+1, Message.FAIL, "{} is blocked by the migration of {}".format(source.get('source'),
> +                                                                                        ", ".join(blocked_by)))
> +    level += 2
> +    for blocker in blocked_by:
> +      if blocker in seen:
> +        continue
> +      find_excuses(blocker)
> +    level -= 2
> +
> +  level -= 1
> +
> +def process_missing_builds(source):
> +  global level
> +  source_name = source.get('source')
> +  missing_builds = source.get('missing-builds', None)
> +  reasons = source.get('reason')
> +
> +  level += 1
> +
> +  Message.lprint(level, Message.NONE, "Fix missing builds:")
> +
> +  # TODO: handle different missing binaries per arch
> +  # TODO: have britney output this in a more easily parseable way
> +  excuses = source.get('excuses')
> +  missing_binaries = []
> +  for excuse in excuses:
> +    if 'missing build' not in excuse:
> +      continue
> +
> +    binaries = excuse.split(' ')[6:-4]
> +    for binary in binaries:
> +      if binary not in missing_binaries:
> +        missing_binaries.append(binary)
> +
> +  any_failure = False
> +  if missing_builds is not None:
> +    for arch in missing_builds.get('on-architectures'):
> +      failed = lp_build_results(source_name, arch)
> +      any_failure = failed if any_failure is False else True
> +
> +    if any_failure is False:
> +      Message.lprint(level+1, Message.INFO, "No failed builds found.")
> +      Message.lprint(level+1, Message.INFO, "{} may have dropped a binary package.".format(source_name))
> +      Message.lprint(level+1, Message.INFO, "Consider requesting the removal of {} "
> +                                            "on #ubuntu-release.".format(", ".join(missing_binaries)))

$ ./what_next.py khmer
✘ khmer is not considered.
 Next steps for khmer 2.1.2+dfsg-3: []
   Fix missing builds:
     No failed builds found.
     khmer may have dropped a binary package.
     Consider requesting the removal of khmer,, liboxli-dev,, liboxli1 on #ubuntu-release.

The recommendation is wrong: the package did build on the amd64 and arm64 architectures, but ppc64el has been dropped by the Debian maintainer.  It should be possible to automatically detect that there are no build records for this package on the architecture for which binaries are missing, and make a different recommendation.

Also, it would be great to give the recommendation in the precise syntax an archive admin needs; in this case: 'remove-package -a ppc64el khmer liboxli-dev liboxli1 -m ANAIS' (in other cases it should be -m NBS).
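
Something along these lines ought to work for the detection, reusing the launchpadlib calls the script already makes (an untested sketch, for process_missing_builds()):

    spph = archive.getPublishedSources(exact_match=True, source_name=source_name,
                                       distro_series=series, pocket="Proposed",
                                       order_by_date=True)[0]
    built_arches = {build.arch_tag for build in spph.getBuilds()}
    for arch in missing_builds.get('on-architectures'):
      if arch not in built_arches:
        # no build record at all on this arch: the binaries were dropped upstream
        names = " ".join(b.rstrip(',') for b in missing_binaries)
        Message.lprint(level+1, Message.INFO,
                       "remove-package -a {} {} -m ANAIS".format(arch, names))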

Also, note the doubled commas.  Either this should be " ".join(missing_binaries), or the commas should be stripped earlier, when the binary package names are split out.
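
The latter is a one-line change where the excuse is split:

    binaries = [b.rstrip(',') for b in excuse.split(' ')[6:-4]]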

> +
> +  elif 'no-binaries' in reasons:
> +    any_failure = lp_build_results(source_name, None)
> +
> +  level -= 1
> + 
> +def process(source):
> +  global level
> +  global seen
> +
> +  source_name = source.get('source')
> +  reasons = source.get('reason')
> +
> +  seen.append(source_name)
> +
> +  candidate = Message.PASS if source.get('is-candidate') else Message.FAIL
> +  Message.lprint(level, candidate, "{} is {}.".format(source_name,
> +                                                      "a valid candidate" if source.get('is-candidate')
> +                                                        else "not considered"))
> +
> +  Message.lprint(level, Message.NONE, "Next steps for {} {}: {}".format(source_name, source.get('new-version'), reasons))
> +
> +  work_needed = False
> +
> +  missing_builds = source.get('missing-builds', None)
> +  if missing_builds is not None or 'no-binaries' in reasons:
> +    work_needed = True
> +    process_missing_builds(source)
> +
> +  if 'depends' in reasons:
> +    work_needed = True
> +    process_unsatisfiable_depends(source)
> +
> +  if 'block' in reasons:
> +    work_needed = True
> +    process_blocking(source)
> +
> +  if 'autopkgtest' in reasons:
> +    work_needed = True
> +    process_autopkgtest(source)
> +
> +  dependencies = source.get('dependencies', None)
> +  if dependencies is not None:
> +    work_needed = True
> +    process_dependencies(source)
> +
> +  if work_needed is False:
> +    Message.lprint(level+1, Message.PASS, "Well job!")
> +    Message.lprint(level+2, Message.INFO, "Investigate if packages are conflicting, by looking at "
> +                                          "'Trying easy on autohinter' lines in update_output.txt for "
> +                                          "{}".format(source.get('source')))
> +    Message.lprint(level+2, Message.INFO, "See http://people.canonical.com/~ubuntu-archive/proposed-migration/update_output.txt")
> +
> +
> +parser = argparse.ArgumentParser(description='Evaluate next steps for proposed migration')
> +parser.add_argument('source',
> +                    help='the package to evaluate')
> +parser.add_argument('--no-cache', dest='do_not_cache', action='store_const',
> +                    const=True, default=False,
> +                    help='Do not cache excuses')
> +parser.add_argument('--refresh', action='store_const',
> +                    const=True, default=False,
> +                    help='Force refresh of cached excuses')
> +
> +args = parser.parse_args()
> +
> +refresh_due = False
> +excuses_path = os.path.expanduser(os.path.join('~', '.excuses.yaml'))
> +if args.do_not_cache:
> +  fp = tempfile.NamedTemporaryFile()
> +else:
> +  try:
> +    fp = open(excuses_path, 'r')
> +  except FileNotFoundError:
> +    refresh_due = True
> +    pass
> +  finally:
> +    fp = open(excuses_path, 'a+')
> +
> +  file_state = os.stat(excuses_path)
> +  mtime = file_state.st_mtime
> +  now = time.time()
> +  if (now - mtime) > MAX_CACHE_AGE:
> +    refresh_due = True
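
Two problems with this open dance: when the 'r' open succeeds, its file object is leaked because the finally: clause unconditionally reopens the file; and with --no-cache but without --refresh, refresh_due stays False, so nothing is ever downloaded and an empty temp file is handed to yaml.load().  Something like this is simpler (a sketch):

    if args.do_not_cache:
      fp = tempfile.NamedTemporaryFile()
      refresh_due = True  # a fresh temp file never holds cached excuses
    else:
      refresh_due = not os.path.exists(excuses_path)
      fp = open(excuses_path, 'a+')
      if not refresh_due:
        refresh_due = (time.time() - os.stat(excuses_path).st_mtime) > MAX_CACHE_AGE
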
> +
> +with fp:
> +  if args.refresh or refresh_due:
> +    url_opener = FancyURLopener()
> +    excuses_data = url_opener.retrieve('http://people.canonical.com/~ubuntu-archive/proposed-migration/update_excuses.yaml',
> +                                       fp.name,
> +                                       report_download)
> +  fp.seek(0)
> +
> +  excuses = yaml.load(fp)
> +  find_excuses(args.source)
> +
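
One last thing: yaml.load() without an explicit Loader will happily construct arbitrary Python objects from a document fetched over plain HTTP; yaml.safe_load(fp) is the right call here.  update_excuses.yaml is also quite large, so if libyaml is available, yaml.load(fp, Loader=yaml.CSafeLoader) is markedly faster.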


-- 
https://code.launchpad.net/~cyphermox/ubuntu-archive-tools/what-next/+merge/338472
Your team Ubuntu Package Archive Administrators is requested to review the proposed merge of lp:~cyphermox/ubuntu-archive-tools/what-next into lp:ubuntu-archive-tools.


