Rev 3206: Remove multiparent plugin in file:///home/pqm/archives/thelove/bzr/%2Btrunk/

Canonical.com Patch Queue Manager pqm@pqm.ubuntu.com
Wed Jan 30 07:47:02 GMT 2008


At file:///home/pqm/archives/thelove/bzr/%2Btrunk/

------------------------------------------------------------
revno: 3206
revision-id: pqm@pqm.ubuntu.com-20080130074652-2t36df3w0l3jhwyf
parent: pqm@pqm.ubuntu.com-20080130061143-ek1afpxqoq89jbp6
parent: aaron@aaronbentley.com-20080130042333-38mz446p5c0ve04a
committer: Canonical.com Patch Queue Manager <pqm@pqm.ubuntu.com>
branch nick: +trunk
timestamp: Wed 2008-01-30 07:46:52 +0000
message:
  Remove multiparent plugin
removed:
  bzrlib/plugins/multiparent.py  mpregen-20070411063203-5x9z7i73add0d6f6-1
    ------------------------------------------------------------
    revno: 3204.2.1
    revision-id: aaron@aaronbentley.com-20080130042333-38mz446p5c0ve04a
    parent: pqm@pqm.ubuntu.com-20080129045844-u41ywp910i6jv8bz
    committer: Aaron Bentley <aaron@aaronbentley.com>
    branch nick: bzr.mpplugin
    timestamp: Tue 2008-01-29 23:23:33 -0500
    message:
      Remove multiparent plugin
    removed:
      bzrlib/plugins/multiparent.py  mpregen-20070411063203-5x9z7i73add0d6f6-1
=== removed file 'bzrlib/plugins/multiparent.py'
--- a/bzrlib/plugins/multiparent.py	2007-10-16 16:02:01 +0000
+++ b/bzrlib/plugins/multiparent.py	1970-01-01 00:00:00 +0000
@@ -1,177 +0,0 @@
-"""Implementation of multiparent diffs for versionedfile-like storage
-
-Provides mp-regen and mp-extract commands.
-Focus is on comparing size/performance to knits.
-"""
-
-from bzrlib.lazy_import import lazy_import
-
-lazy_import(globals(), """
-import (
-        errno,
-        os,
-        sys,
-        time,
-        )
-
-from bzrlib import (
-    commands,
-    urlutils
-    )
-from bzrlib.workingtree import WorkingTree
-from bzrlib.tests import TestUtil
-
-from bzrlib.plugins.multiparent.multiparent import (
-    MultiVersionedFile,
-    MultiMemoryVersionedFile,
-    )
-""")
-
-class cmd_mp_regen(commands.Command):
-    """Generate a multiparent versionedfile"""
-
-    hidden = True
-
-    takes_args = ['file?']
-
-    takes_options = [commands.Option('sync-snapshots',
-                                     help='Snapshots follow source.'),
-                     commands.Option('snapshot-interval', type=int,
-                                     help='Take snapshots every x revisions.'),
-                     commands.Option('outfile', type=unicode,
-                                     help='Write pseudo-knit to this file.'),
-                     commands.Option('memory', help='Use memory, not disk.'),
-                     commands.Option('extract', help='Test extract time.'),
-                     commands.Option('single', help='Use a single parent.'),
-                     commands.Option('verify', help='Verify added texts.'),
-                     commands.Option('cache', help='Aggressively cache.'),
-                     commands.Option('size', help='Aggressive size.'),
-                     commands.Option('build', help='Aggressive build.'),
-                    ]
-    hidden = True
-
-    def run(self, file=None, sync_snapshots=False, snapshot_interval=26,
-            lsprof_timed=False, dump=False, extract=False, single=False,
-            verify=False, outfile=None, memory=False, cache=False,
-            size=False, build=False):
-        file_weave = get_file_weave(file)
-        url = file_weave.transport.abspath(file_weave.filename)
-        sys.stderr.write('Importing: %s\n' % \
-            urlutils.local_path_from_url(url))
-        if sync_snapshots:
-            sys.stderr.write('Snapshots follow input\n')
-        else:
-            sys.stderr.write('Snapshot interval: %d\n' % snapshot_interval)
-        if not memory:
-            if outfile is None:
-                filename = 'pknit'
-            else:
-                filename = outfile
-            vf = MultiVersionedFile(filename, snapshot_interval)
-        else:
-            vf = MultiMemoryVersionedFile(snapshot_interval)
-        vf.destroy()
-        old_snapshots = set(r for r in file_weave.versions() if
-                        file_weave._index.get_method(r) == 'fulltext')
-        if sync_snapshots:
-            to_sync = old_snapshots
-        elif size or build:
-            assert memory
-            to_sync = set()
-        else:
-            to_sync = vf.select_snapshots(file_weave)
-        sys.stderr.write("%d fulltext(s)\n" % len(old_snapshots))
-        sys.stderr.write("%d planned snapshots\n" % len(to_sync))
-
-        try:
-            vf.import_versionedfile(file_weave, to_sync, single_parent=single,
-                                    verify=verify, no_cache=not cache)
-            if size:
-                snapshots = vf.select_by_size(len(old_snapshots))
-                for version_id in snapshots:
-                    vf.make_snapshot(version_id)
-            if build:
-                ranking = vf.get_build_ranking()
-                snapshots = ranking[:len(old_snapshots) -\
-                    len(vf._snapshots)]
-                for version_id in snapshots:
-                    vf.make_snapshot(version_id)
-        except:
-            vf.destroy()
-            raise
-        try:
-            sys.stderr.write("%d actual snapshots\n" % len(vf._snapshots))
-            if not cache:
-                vf.clear_cache()
-            if memory:
-                if outfile is not None:
-                    vf_file = MultiVersionedFile(outfile)
-                    vf_file.import_diffs(vf)
-            else:
-                vf_file = vf
-        finally:
-            if outfile is None:
-                vf.destroy()
-            else:
-                vf_file.save()
-
-class cmd_mp_extract(commands.Command):
-    """Test extraction time multiparent knits"""
-
-    hidden = True
-
-    takes_options = [
-        commands.Option('lsprof-timed', help='Use lsprof.'),
-        commands.Option('parallel', help='Extract multiple versions at once.'),
-        commands.Option('count', help='Number of cycles to do.', type=int),
-        ]
-
-    takes_args = ['filename', 'vfile?']
-
-    def run(self, filename, vfile=None, lsprof_timed=False, count=1000,
-            parallel=False):
-        vf = MultiVersionedFile(filename)
-        vf.load()
-        snapshots = [r for r in vf.versions() if vf.get_diff(r).is_snapshot()]
-        print '%d snapshots' % len(snapshots)
-        revisions = list(vf.versions())
-        revisions = revisions[-count:]
-        print 'Testing extract time of %d revisions' % len(revisions)
-        if parallel:
-            revisions_list = [revisions]
-        else:
-            revisions_list = [[r] for r in revisions]
-        start = time.clock()
-        for revisions in revisions_list:
-            vf = MultiVersionedFile(filename)
-            vf.load()
-            vf.get_line_list(revisions)
-        sys.stderr.write(str(time.clock() - start))
-        sys.stderr.write('\n')
-        if lsprof_timed:
-            from bzrlib.lsprof import profile
-            vf.clear_cache()
-            ret, stats = profile(vf.get_line_list, revisions_list[-1][-1])
-            stats.sort()
-            stats.pprint()
-        start = time.clock()
-        for revisions in revisions_list:
-            file_weave = get_file_weave(vfile)
-            file_weave.get_line_list(revisions)
-        sys.stderr.write(str(time.clock() - start))
-        sys.stderr.write('\n')
-
-
-def get_file_weave(filename=None, wt=None):
-    if filename is None:
-        wt, path = WorkingTree.open_containing('.')
-        return wt.branch.repository.get_inventory_weave()
-    else:
-        wt, path = WorkingTree.open_containing(filename)
-        file_id = wt.path2id(path)
-        bt = wt.branch.repository.revision_tree(wt.last_revision())
-        return bt.get_weave(file_id)
-
-
-commands.register_command(cmd_mp_regen)
-commands.register_command(cmd_mp_extract)
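
For reference, the plugin removed above registered two hidden benchmarking
commands, mp-regen and mp-extract, whose stated focus was comparing
size/performance to knits. A minimal sketch of how they could have been
driven in-process before this revision, assuming the plugin were still
installed (the path 'foo.txt', the snapshot interval, and the cycle count
are illustrative values, not taken from the diff):

    # Dispatch the removed commands through bzrlib's command machinery,
    # equivalent to running:
    #   bzr mp-regen foo.txt --snapshot-interval 26 --outfile pknit
    #   bzr mp-extract pknit foo.txt --count 100
    from bzrlib.commands import run_bzr

    # Build a multiparent pseudo-knit from the history of foo.txt.
    run_bzr(['mp-regen', 'foo.txt', '--snapshot-interval', '26',
             '--outfile', 'pknit'])
    # Time extraction of the last 100 revisions from the generated file.
    run_bzr(['mp-extract', 'pknit', 'foo.txt', '--count', '100'])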
