Rev 5772: (jelmer) Make GroupCHKStreamSource independent of KnitPackStreamSource. in file:///home/pqm/archives/thelove/bzr/%2Btrunk/

From: Canonical.com Patch Queue Manager <pqm at pqm.ubuntu.com>
Date: Fri Apr 8 13:38:05 UTC 2011


At file:///home/pqm/archives/thelove/bzr/%2Btrunk/

------------------------------------------------------------
revno: 5772 [merge]
revision-id: pqm at pqm.ubuntu.com-20110408133740-znydrnxzi0ucm2ep
parent: pqm at pqm.ubuntu.com-20110408120454-og821s42l3dkaei2
parent: jelmer at samba.org-20110405173910-jhed3krf8k25jmdz
committer: Canonical.com Patch Queue Manager <pqm at pqm.ubuntu.com>
branch nick: +trunk
timestamp: Fri 2011-04-08 13:37:40 +0000
message:
  (jelmer) Make GroupCHKStreamSource independent of KnitPackStreamSource.
   (Jelmer Vernooij)
modified:
  bzrlib/repofmt/groupcompress_repo.py repofmt.py-20080715094215-wp1qfvoo7093c8qr-1
  bzrlib/repofmt/knitpack_repo.py knitpack_repo.py-20110405143430-6p75yrk99v6pb770-1
  bzrlib/repofmt/pack_repo.py    pack_repo.py-20070813041115-gjv5ma7ktfqwsjgn-1
  bzrlib/tests/test_repository.py test_repository.py-20060131075918-65c555b881612f4d
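
For orientation, here is a minimal sketch (not part of the patch itself) of the
class relationships after this merge, assuming a bzrlib tree that contains this
revision; all module paths and class names below are taken from the hunks that
follow:

    from bzrlib.repository import StreamSource
    from bzrlib.repofmt.pack_repo import PackRepository   # renamed from KnitPackRepository
    from bzrlib.repofmt.knitpack_repo import (
        KnitPackRepository,      # new thin subclass of PackRepository
        KnitPackStreamSource,    # moved here from pack_repo.py
        )
    from bzrlib.repofmt.groupcompress_repo import GroupCHKStreamSource

    # GroupCHKStreamSource now derives from the generic StreamSource, so the
    # CHK code path no longer depends on the knit-specific stream source.
    assert issubclass(GroupCHKStreamSource, StreamSource)
    assert not issubclass(GroupCHKStreamSource, KnitPackStreamSource)
    assert issubclass(KnitPackRepository, PackRepository)
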
=== modified file 'bzrlib/repofmt/groupcompress_repo.py'
--- a/bzrlib/repofmt/groupcompress_repo.py	2011-04-05 14:48:54 +0000
+++ b/bzrlib/repofmt/groupcompress_repo.py	2011-04-05 15:07:09 +0000
@@ -46,14 +46,16 @@
 from bzrlib.repofmt.pack_repo import (
     Pack,
     NewPack,
-    KnitPackRepository,
-    KnitPackStreamSource,
+    PackRepository,
     PackRootCommitBuilder,
     RepositoryPackCollection,
     RepositoryFormatPack,
     ResumedPack,
     Packer,
     )
+from bzrlib.repository import (
+    StreamSource,
+    )
 from bzrlib.static_tuple import StaticTuple
 
 
@@ -829,13 +831,13 @@
         return result
 
 
-class CHKInventoryRepository(KnitPackRepository):
-    """subclass of KnitPackRepository that uses CHK based inventories."""
+class CHKInventoryRepository(PackRepository):
+    """subclass of PackRepository that uses CHK based inventories."""
 
     def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
         _serializer):
         """Overridden to change pack collection class."""
-        KnitPackRepository.__init__(self, _format, a_bzrdir, control_files,
+        PackRepository.__init__(self, _format, a_bzrdir, control_files,
             _commit_builder_class, _serializer)
         # and now replace everything it did :)
         index_transport = self._transport.clone('indices')
@@ -1147,7 +1149,7 @@
         return super(CHKInventoryRepository, self)._get_source(to_format)
 
 
-class GroupCHKStreamSource(KnitPackStreamSource):
+class GroupCHKStreamSource(StreamSource):
     """Used when both the source and target repo are GroupCHK repos."""
 
     def __init__(self, from_repository, to_format):
@@ -1240,6 +1242,13 @@
             self._chk_p_id_roots = None
         yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()
 
+    def _get_text_stream(self):
+        # Note: We know we don't have to handle adding root keys, because both
+        # the source and target are the identical network name.
+        text_stream = self.from_repository.texts.get_record_stream(
+                        self._text_keys, self._text_fetch_order, False)
+        return ('texts', text_stream)
+
     def get_stream(self, search):
         def wrap_and_count(pb, rc, stream):
             """Yield records from stream while showing progress."""

=== modified file 'bzrlib/repofmt/knitpack_repo.py'
--- a/bzrlib/repofmt/knitpack_repo.py	2011-04-05 14:47:30 +0000
+++ b/bzrlib/repofmt/knitpack_repo.py	2011-04-05 15:07:09 +0000
@@ -24,6 +24,10 @@
     xml6,
     xml7,
     )
+from bzrlib.knit import (
+    KnitPlainFactory,
+    KnitVersionedFiles,
+    )
 """)
 
 from bzrlib import (
@@ -35,10 +39,21 @@
     )
 from bzrlib.repofmt.pack_repo import (
     RepositoryFormatPack,
-    KnitPackRepository,
     PackCommitBuilder,
+    PackRepository,
     PackRootCommitBuilder,
     )
+from bzrlib.repository import (
+    StreamSource,
+    )
+
+
+class KnitPackRepository(PackRepository):
+
+    def _get_source(self, to_format):
+        if to_format.network_name() == self._format.network_name():
+            return KnitPackStreamSource(self, to_format)
+        return PackRepository._get_source(self, to_format)
 
 
 class RepositoryFormatKnitPack1(RepositoryFormatPack):
@@ -390,3 +405,76 @@
         """See RepositoryFormat.get_format_description()."""
         return ("Development repository format, currently the same as "
             "1.6.1-subtree with B+Tree indices.\n")
+
+
+class KnitPackStreamSource(StreamSource):
+    """A StreamSource used to transfer data between same-format KnitPack repos.
+
+    This source assumes:
+        1) Same serialization format for all objects
+        2) Same root information
+        3) XML format inventories
+        4) Atomic inserts (so we can stream inventory texts before text
+           content)
+        5) No chk_bytes
+    """
+
+    def __init__(self, from_repository, to_format):
+        super(KnitPackStreamSource, self).__init__(from_repository, to_format)
+        self._text_keys = None
+        self._text_fetch_order = 'unordered'
+
+    def _get_filtered_inv_stream(self, revision_ids):
+        from_repo = self.from_repository
+        parent_ids = from_repo._find_parent_ids_of_revisions(revision_ids)
+        parent_keys = [(p,) for p in parent_ids]
+        find_text_keys = from_repo._serializer._find_text_key_references
+        parent_text_keys = set(find_text_keys(
+            from_repo._inventory_xml_lines_for_keys(parent_keys)))
+        content_text_keys = set()
+        knit = KnitVersionedFiles(None, None)
+        factory = KnitPlainFactory()
+        def find_text_keys_from_content(record):
+            if record.storage_kind not in ('knit-delta-gz', 'knit-ft-gz'):
+                raise ValueError("Unknown content storage kind for"
+                    " inventory text: %s" % (record.storage_kind,))
+            # It's a knit record, it has a _raw_record field (even if it was
+            # reconstituted from a network stream).
+            raw_data = record._raw_record
+            # read the entire thing
+            revision_id = record.key[-1]
+            content, _ = knit._parse_record(revision_id, raw_data)
+            if record.storage_kind == 'knit-delta-gz':
+                line_iterator = factory.get_linedelta_content(content)
+            elif record.storage_kind == 'knit-ft-gz':
+                line_iterator = factory.get_fulltext_content(content)
+            content_text_keys.update(find_text_keys(
+                [(line, revision_id) for line in line_iterator]))
+        revision_keys = [(r,) for r in revision_ids]
+        def _filtered_inv_stream():
+            source_vf = from_repo.inventories
+            stream = source_vf.get_record_stream(revision_keys,
+                                                 'unordered', False)
+            for record in stream:
+                if record.storage_kind == 'absent':
+                    raise errors.NoSuchRevision(from_repo, record.key)
+                find_text_keys_from_content(record)
+                yield record
+            self._text_keys = content_text_keys - parent_text_keys
+        return ('inventories', _filtered_inv_stream())
+
+    def _get_text_stream(self):
+        # Note: We know we don't have to handle adding root keys, because both
+        # the source and target are the identical network name.
+        text_stream = self.from_repository.texts.get_record_stream(
+                        self._text_keys, self._text_fetch_order, False)
+        return ('texts', text_stream)
+
+    def get_stream(self, search):
+        revision_ids = search.get_keys()
+        for stream_info in self._fetch_revision_texts(revision_ids):
+            yield stream_info
+        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
+        yield self._get_filtered_inv_stream(revision_ids)
+        yield self._get_text_stream()
+

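The moved KnitPackStreamSource keeps its original two-phase behaviour: while
yielding the XML inventory records it parses each knit record to collect the
set of text keys the target will need, and only afterwards yields those texts
(which is why the docstring above requires atomic inserts). A minimal,
self-contained illustration of that "collect keys while streaming" pattern
follows; the names here are hypothetical, not bzrlib API:

    def filtered_inventory_stream(inventory_records, wanted_text_keys):
        # Yield inventory records unchanged, accumulating the text keys they
        # reference as a side effect of streaming them.
        for record in inventory_records:
            wanted_text_keys.update(record['text_keys'])
            yield record

    def get_stream(inventory_records, all_texts):
        wanted = set()
        yield 'inventories', filtered_inventory_stream(inventory_records, wanted)

        def text_stream():
            # This body does not run until the consumer iterates it, i.e. after
            # the inventory substream has been exhausted, so 'wanted' is fully
            # populated by then.
            for key in sorted(wanted):
                yield all_texts[key]
        yield 'texts', text_stream()

    # Example run:
    invs = [{'id': 'rev-1', 'text_keys': {('file-a', 'rev-1')}},
            {'id': 'rev-2', 'text_keys': {('file-b', 'rev-2')}}]
    texts = {('file-a', 'rev-1'): 'a\n', ('file-b', 'rev-2'): 'b\n'}
    for name, substream in get_stream(invs, texts):
        print(name, list(substream))
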
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py	2011-04-08 10:54:03 +0000
+++ b/bzrlib/repofmt/pack_repo.py	2011-04-08 13:37:40 +0000
@@ -61,7 +61,6 @@
     RepositoryFormat,
     RepositoryWriteLockResult,
     RootCommitBuilder,
-    StreamSource,
     )
 from bzrlib.trace import (
     mutter,
@@ -2208,7 +2207,7 @@
             self._resume_pack(token)
 
 
-class KnitPackRepository(KnitRepository):
+class PackRepository(KnitRepository):
     """Repository with knit objects stored inside pack containers.
 
     The layering for a KnitPackRepository is:
@@ -2299,11 +2298,6 @@
         self.revisions._index._key_dependencies.clear()
         self._pack_collection._abort_write_group()
 
-    def _get_source(self, to_format):
-        if to_format.network_name() == self._format.network_name():
-            return KnitPackStreamSource(self, to_format)
-        return super(KnitPackRepository, self)._get_source(to_format)
-
     def _make_parents_provider(self):
         return graph.CachingParentsProvider(self)
 
@@ -2441,79 +2435,6 @@
                 repo.unlock()
 
 
-class KnitPackStreamSource(StreamSource):
-    """A StreamSource used to transfer data between same-format KnitPack repos.
-
-    This source assumes:
-        1) Same serialization format for all objects
-        2) Same root information
-        3) XML format inventories
-        4) Atomic inserts (so we can stream inventory texts before text
-           content)
-        5) No chk_bytes
-    """
-
-    def __init__(self, from_repository, to_format):
-        super(KnitPackStreamSource, self).__init__(from_repository, to_format)
-        self._text_keys = None
-        self._text_fetch_order = 'unordered'
-
-    def _get_filtered_inv_stream(self, revision_ids):
-        from_repo = self.from_repository
-        parent_ids = from_repo._find_parent_ids_of_revisions(revision_ids)
-        parent_keys = [(p,) for p in parent_ids]
-        find_text_keys = from_repo._serializer._find_text_key_references
-        parent_text_keys = set(find_text_keys(
-            from_repo._inventory_xml_lines_for_keys(parent_keys)))
-        content_text_keys = set()
-        knit = KnitVersionedFiles(None, None)
-        factory = KnitPlainFactory()
-        def find_text_keys_from_content(record):
-            if record.storage_kind not in ('knit-delta-gz', 'knit-ft-gz'):
-                raise ValueError("Unknown content storage kind for"
-                    " inventory text: %s" % (record.storage_kind,))
-            # It's a knit record, it has a _raw_record field (even if it was
-            # reconstituted from a network stream).
-            raw_data = record._raw_record
-            # read the entire thing
-            revision_id = record.key[-1]
-            content, _ = knit._parse_record(revision_id, raw_data)
-            if record.storage_kind == 'knit-delta-gz':
-                line_iterator = factory.get_linedelta_content(content)
-            elif record.storage_kind == 'knit-ft-gz':
-                line_iterator = factory.get_fulltext_content(content)
-            content_text_keys.update(find_text_keys(
-                [(line, revision_id) for line in line_iterator]))
-        revision_keys = [(r,) for r in revision_ids]
-        def _filtered_inv_stream():
-            source_vf = from_repo.inventories
-            stream = source_vf.get_record_stream(revision_keys,
-                                                 'unordered', False)
-            for record in stream:
-                if record.storage_kind == 'absent':
-                    raise errors.NoSuchRevision(from_repo, record.key)
-                find_text_keys_from_content(record)
-                yield record
-            self._text_keys = content_text_keys - parent_text_keys
-        return ('inventories', _filtered_inv_stream())
-
-    def _get_text_stream(self):
-        # Note: We know we don't have to handle adding root keys, because both
-        # the source and target are the identical network name.
-        text_stream = self.from_repository.texts.get_record_stream(
-                        self._text_keys, self._text_fetch_order, False)
-        return ('texts', text_stream)
-
-    def get_stream(self, search):
-        revision_ids = search.get_keys()
-        for stream_info in self._fetch_revision_texts(revision_ids):
-            yield stream_info
-        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
-        yield self._get_filtered_inv_stream(revision_ids)
-        yield self._get_text_stream()
-
-
-
 class RepositoryFormatPack(MetaDirRepositoryFormat):
     """Format logic for pack structured repositories.
 

=== modified file 'bzrlib/tests/test_repository.py'
--- a/bzrlib/tests/test_repository.py	2011-03-12 00:01:34 +0000
+++ b/bzrlib/tests/test_repository.py	2011-04-05 17:39:01 +0000
@@ -57,6 +57,7 @@
 from bzrlib.repofmt import (
     groupcompress_repo,
     knitrepo,
+    knitpack_repo,
     pack_repo,
     )
 
@@ -747,25 +748,25 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -774,7 +775,7 @@
         target = self.make_repository('target', format='1.9')
         target = repository.Repository.open(trans.base)
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_stream_source_to_non_exact(self):
         source = self.make_repository('source', format='pack-0.92')
