[kteam-tools PATCH] duplicate-bugs: find duplicates given two cycles

Kleber Souza kleber.souza at canonical.com
Thu Nov 2 13:58:04 UTC 2017


On 11/02/17 12:34, Thadeu Lima de Souza Cascardo wrote:
> Given two cycles and a series, find bugs that match them, and mark the
> bugs from the old cycle as duplicate of those from the new cycle.
> 
> Signed-off-by: Thadeu Lima de Souza Cascardo <cascardo at canonical.com>
> ---
>  stable/duplicate-bugs | 206 ++++++++++++++++++++++++++++++++++++++++++++++++++
>  1 file changed, 206 insertions(+)
>  create mode 100755 stable/duplicate-bugs
> 
> diff --git a/stable/duplicate-bugs b/stable/duplicate-bugs
> new file mode 100755
> index 00000000..fc15e1ef
> --- /dev/null
> +++ b/stable/duplicate-bugs
> @@ -0,0 +1,206 @@
> +#!/usr/bin/env python3
> +#
> +import sys
> +import os
> +
> +sys.path.append(os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'py3')))
> +
> +
> +from datetime                           import datetime, timedelta
> +from argparse                           import ArgumentParser, RawDescriptionHelpFormatter
> +from logging                            import basicConfig, DEBUG, INFO, WARNING
> +from ktl.log                            import center, cleave, cdebug, cinfo, Clog
> +from ktl.launchpad                      import Launchpad
> +from ktl.ubuntu                         import Ubuntu
> +
> +# AppError
> +#
> +# A general exception that can be raised when an error is encountered in the app.
> +#
> +class AppError(Exception):
> +    # __init__
> +    #
> +    def __init__(self, error=''):
> +        self.msg = error
> +
> +# Tracking
> +#
> +class Tracking():
> +    '''
> +    '''
> +    # __init__
> +    #
> +    def __init__(s, args):
> +        s.args = args
> +        s.launchpad = Launchpad('start-sru-cycle').service
> +        s.project_tracked = 'kernel-sru-workflow'
> +        ubuntu  = Ubuntu()
> +        series = ubuntu.supported_series_version
> +        series.append(ubuntu.development_series_version)
> +        s.series = []
> +        for ss in sorted(series):
> +            s.series.append(ubuntu.db[ss]['name'])
> +
> +    def get_master(s, project, cycle, series):
> +        '''
> +        Return the bug id of the master bug
> +        '''
> +        center(s.__class__.__name__ + '.get_master')
> +
> +        retval = None
> +
> +        cdebug('project: %s' % project)
> +        cycle = 'kernel-sru-cycle-' + cycle
> +        search_tags            = [cycle, series, 'kernel-sru-master-kernel']
> +        search_tags_combinator = "All"
> +        # A list of the bug statuses that we care about
> +        #
> +        search_status          = ["New", "In Progress", "Incomplete", "Fix Committed", "Invalid"]
> +        # The tracking bugs that we are interested in should have been modified recently (days).
> +        #
> +        search_since           = datetime.utcnow() - timedelta(days=30)
> +        lp_project = s.launchpad.projects[project]
> +        tasks = lp_project.searchTasks(status=search_status, tags=search_tags, tags_combinator=search_tags_combinator, modified_since=search_since, omit_duplicates=False)
> +
> +        if len(tasks) == 1:
> +            retval = tasks[0].bug.id
> +
> +        cleave(s.__class__.__name__ + '.get_master')
> +        return retval
> +
> +    def get_derivatives(s, project, bugid):
> +        '''
> +        Return the list of bug ids that are derivatives or backports of bugid
> +        '''
> +        center(s.__class__.__name__ + '.get_derivatives')
> +
> +        retval = []
> +
> +        cdebug('project: %s' % project)
> +        search_tags            = ["kernel-sru-derivative-of-" + str(bugid), "kernel-sru-backport-of-" + str(bugid)]
> +        search_tags_combinator = "Any"
> +        # A list of the bug statuses that we care about
> +        #
> +        search_status          = ["New", "In Progress", "Incomplete", "Fix Committed", "Invalid"]
> +        # The tracking bugs that we are interested in should have been modified recently (days).
> +        #
> +        search_since           = datetime.utcnow() - timedelta(days=30)
> +        lp_project = s.launchpad.projects[project]
> +        tasks = lp_project.searchTasks(status=search_status, tags=search_tags, tags_combinator=search_tags_combinator, modified_since=search_since, omit_duplicates=False)
> +
> +        for task in tasks:
> +            retval.append(task.bug.id)
> +
> +        cleave(s.__class__.__name__ + '.get_derivatives')
> +        return retval
> +
> +    def get_bugs(s, project, cycle, series):
> +        master = s.get_master(project, cycle, series)
> +        if not master:
> +            return []
> +        bugs = s.get_derivatives(s.project_tracked, master)
> +        bugs.append(master)
> +        return bugs
> +
> +    def get_series(s, lpbug):
> +        '''
> +            Get series for a given bug
> +        '''
> +        for series in s.series:
> +            if series in lpbug.tags:
> +                return series
> +        return None
> +
> +    # main
> +    #
> +    def main(s):
> +        retval = 1
> +        try:
> +            previous_cycle = s.get_bugs(s.project_tracked, s.args.sru_cycle, s.args.series)
> +            next_cycle = s.get_bugs(s.project_tracked, s.args.next_cycle, s.args.series)
> +
> +            previous_bugs = {}
> +            next_bugs = {}
> +            for bug in previous_cycle:
> +                lpbug = s.launchpad.bugs[bug]
> +                package = lpbug.title.split(":")[0]
> +                series = s.get_series(lpbug)
> +                key = package + ":" + series
> +                if previous_bugs.get(key):
> +                    raise AppError("duplicate package in previous cycle: %s" % (key))
> +                previous_bugs[key] = lpbug
> +            for bug in next_cycle:
> +                lpbug = s.launchpad.bugs[bug]
> +                package = lpbug.title.split(":")[0]
> +                series = s.get_series(lpbug)
> +                key = package + ":" + series
> +                if next_bugs.get(key):
> +                    raise AppError("duplicate package in next cycle: %s" % (key))
> +                next_bugs[key] = lpbug
> +
> +            for package in next_bugs:
> +                if not previous_bugs.get(package):
> +                    raise AppError("could not find package %s in previous cycle" % (package))
> +            for package in previous_bugs:
> +                if not next_bugs.get(package):
> +                    raise AppError("could not find package %s in next cycle" % (package))

Would it make sense to just show a warning message here instead of
aborting the script? We should have a 1:1 relationship between the old
and the new tracking bugs, but if for some reason that's not the case
we can still use the script to automatically duplicate most of the
bugs.
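Something along these lines, maybe (untested sketch, reusing the
previous_bugs/next_bugs dicts from above; it warns about the unmatched
entries and drops them so the remaining bugs still get duplicated):

            # Warn about packages that only exist in one of the two cycles
            # and drop them, instead of raising AppError and aborting.
            unmatched = set(previous_bugs) ^ set(next_bugs)
            for key in sorted(unmatched):
                print("WARNING: no matching tracking bug for %s, skipping" % (key), file=sys.stderr)
                previous_bugs.pop(key, None)
                next_bugs.pop(key, None)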

> +
> +            for pkg in previous_bugs:
> +                bug = next_bugs[pkg]
> +                prev = previous_bugs[pkg]
> +                prev.duplicate_of = bug
> +                prev.lp_save()
> +                print("Marked #{} as duplicate of #{}: {}".format(prev.id, bug.id, bug.title))
> +
> +            retval = 0
> +
> +        except AppError as e:
> +            print("ERROR: " + str(e), file=sys.stderr)
> +
> +        # Handle the user pressing <ctrl-C>.
> +        #
> +        except KeyboardInterrupt:
> +            print("Aborting ...")
> +
> +        if retval > 0:
> +            print("")
> +            print("Due to the above error(s), this script is unable to continue and is terminating.")
> +            print("")
> +
> +        return retval
> +
> +if __name__ == '__main__':
> +    app_description = '''
> +    '''
> +
> +    app_epilog = '''
> +    '''
> +
> +    parser = ArgumentParser(description=app_description, epilog=app_epilog, formatter_class=RawDescriptionHelpFormatter)
> +    parser.add_argument('--info',  action='store_true', default=False, help='')
> +    parser.add_argument('--debug', action='store_true', default=False, help='')
> +    parser.add_argument('--dry-run', action='store_true', default=False, help='')
> +    parser.add_argument('--re-run', action='store_true', default=False, help='')

The two options above don't seem to be used anywhere. '--dry-run'
would be easy to implement though, and quite useful :-).
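For example, in the loop that marks the duplicates (untested sketch,
assuming a print-only path is all a dry run needs):

            for pkg in previous_bugs:
                bug = next_bugs[pkg]
                prev = previous_bugs[pkg]
                if s.args.dry_run:
                    # Only report what would be done, don't touch Launchpad.
                    print("Would mark #{} as duplicate of #{}: {}".format(prev.id, bug.id, bug.title))
                    continue
                prev.duplicate_of = bug
                prev.lp_save()
                print("Marked #{} as duplicate of #{}: {}".format(prev.id, bug.id, bug.title))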

> +    parser.add_argument('--sru-cycle', action='store', required=True, help='')
> +    parser.add_argument('--next-cycle', action='store', required=True, help='')
> +    parser.add_argument('--series', action='store', required=True, help="")
> +    args = parser.parse_args()
> +
> +    # If logging parameters were set on the command line, handle them
> +    # here.
> +    #
> +    Clog.color = True
> +    if args.debug:
> +        log_format = "%(levelname)s - %(message)s"
> +        basicConfig(level=DEBUG, format=log_format)
> +        Clog.dbg = True
> +    elif args.info:
> +        log_format = "%(message)s"
> +        basicConfig(level=INFO, format=log_format)
> +    else:
> +        log_format = "%(message)s"
> +        basicConfig(level=WARNING, format=log_format)
> +
> +    exit(Tracking(args).main())
> +
> +# vi:set ts=4 sw=4 expandtab syntax=python:
> 