Rev 3391: Review feedback. in http://people.ubuntu.com/~robertc/baz2.0/shallow-branch

Robert Collins robertc at robertcollins.net
Tue Jul 1 04:49:59 BST 2008


At http://people.ubuntu.com/~robertc/baz2.0/shallow-branch

------------------------------------------------------------
revno: 3391
revision-id: robertc at robertcollins.net-20080701034954-5r0c28cocphgv6td
parent: robertc at robertcollins.net-20080701033835-dwbt0f0x3vmf5atr
committer: Robert Collins <robertc at robertcollins.net>
branch nick: stacking-knits
timestamp: Tue 2008-07-01 13:49:54 +1000
message:
  Review feedback.
modified:
  bzrlib/knit.py                 knit.py-20051212171256-f056ac8f0fbe1bd9
  bzrlib/tests/test_knit.py      test_knit.py-20051212171302-95d4c00dd5f11f2b
  bzrlib/versionedfile.py        versionedfile.py-20060222045106-5039c71ee3b65490
=== modified file 'bzrlib/knit.py'
--- a/bzrlib/knit.py	2008-07-01 03:38:35 +0000
+++ b/bzrlib/knit.py	2008-07-01 03:49:54 +0000
@@ -1007,7 +1007,6 @@
                 if record.storage_kind == 'absent':
                     continue
                 missing_keys.remove(record.key)
-                bytes = record.get_bytes_as('fulltext')
                 lines = split_lines(record.get_bytes_as('fulltext'))
                 text_map[record.key] = lines
                 content_map[record.key] = PlainKnitContent(lines, record.key)
@@ -1065,9 +1064,9 @@
         :return: A mapping from keys to parents. Absent keys are absent from
             the mapping.
         """
-        return self._get_parent_map(keys)[0]
+        return self._get_parent_map_with_sources(keys)[0]
 
-    def _get_parent_map(self, keys):
+    def _get_parent_map_with_sources(self, keys):
         """Get a map of the parents of keys.
 
         :param keys: The keys to look up parents for.
@@ -1179,7 +1178,7 @@
                 if not result:
                     needed_from_fallback.add(key)
         # Double index lookups here : need a unified api ?
-        global_map, parent_maps = self._get_parent_map(keys)
+        global_map, parent_maps = self._get_parent_map_with_sources(keys)
         if ordering == 'topological':
             # Global topological sort
             present_keys = topo_sort(global_map)
@@ -1208,9 +1207,6 @@
         for key in absent_keys:
             yield AbsentContentFactory(key)
         # restrict our view to the keys we can answer.
-        our_keys = parent_maps[0]
-        # keys - needed_from_fallback
-        # keys = keys - absent_keys
         # XXX: Memory: TODO: batch data here to cap buffered data at (say) 1MB.
         # XXX: At that point we need to consider the impact of double reads by
         # utilising components multiple times.
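
A note on the rename above: get_parent_map keeps its public contract (a plain
dict, with absent keys omitted), while the private helper's new name makes it
explicit that it also returns per-source results. A minimal sketch of that
shape, assuming an index stacked over fallbacks; the class and attribute
names below are hypothetical and this is not the real KnitVersionedFiles
code:

    class StackedIndexExample(object):
        """Hypothetical index that consults local data, then fallbacks."""

        def __init__(self, local_parents, fallback_maps):
            # local_parents: {key: (parent_key, ...)} for this index.
            # fallback_maps: list of dicts of the same shape, one per
            # fallback versioned file (e.g. the stacked-on branch).
            self._local = local_parents
            self._fallbacks = fallback_maps

        def get_parent_map(self, keys):
            # Public API: only the combined mapping.
            return self._get_parent_map_with_sources(keys)[0]

        def _get_parent_map_with_sources(self, keys):
            # Returns (combined_map, [map_per_source, ...]) so callers
            # such as get_record_stream can see which source answered a
            # key without a second round of index lookups.
            missing = set(keys)
            combined = {}
            per_source = []
            for source in [self._local] + self._fallbacks:
                found = dict((k, source[k]) for k in missing if k in source)
                per_source.append(found)
                combined.update(found)
                missing.difference_update(found)
            return combined, per_source

get_record_stream in the hunk above then unpacks both values:
global_map, parent_maps = self._get_parent_map_with_sources(keys).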

=== modified file 'bzrlib/tests/test_knit.py'
--- a/bzrlib/tests/test_knit.py	2008-06-25 01:29:48 +0000
+++ b/bzrlib/tests/test_knit.py	2008-07-01 03:49:54 +0000
@@ -1411,8 +1411,8 @@
         test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
         self.assertEqual('fulltext', test._index.get_method(key_cross_border))
         self.assertEqual([("get_parent_map", set([key_basis]))], basis.calls)
-        basis.calls = []
         # Subsequent adds do delta.
+        basis.calls = []
         test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
         self.assertEqual('line-delta', test._index.get_method(key_delta))
         self.assertEqual([], basis.calls)
@@ -1438,7 +1438,7 @@
         self.assertEqual([("annotate", key_basis)], basis.calls)
 
     def test_check(self):
-        # check() must not check the fallback files, its none of its business.
+        # check() must not check the fallback files, it's none of its business.
         basis, test = self.get_basis_and_test_knit()
         basis.check = None
         test.check()
@@ -1493,7 +1493,7 @@
                     record.get_bytes_as(record.storage_kind))
                 self.assertEqual(reference.get_bytes_as('fulltext'),
                     record.get_bytes_as('fulltext'))
-        # Its not strictly minimal, but it seems reasonable for now for it to
+        # It's not strictly minimal, but it seems reasonable for now for it to
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_missing])),
@@ -1539,7 +1539,7 @@
             self.assertEqual(record.sha1, result[1])
             self.assertEqual(record.storage_kind, result[2])
             self.assertEqual(record.get_bytes_as('fulltext'), result[3])
-        # Its not strictly minimal, but it seems reasonable for now for it to
+        # It's not strictly minimal, but it seems reasonable for now for it to
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
@@ -1577,7 +1577,7 @@
                 self.assertEqual(reference.storage_kind, record.storage_kind)
                 self.assertEqual(reference.get_bytes_as(reference.storage_kind),
                     record.get_bytes_as(record.storage_kind))
-        # Its not strictly minimal, but it seems reasonable for now for it to
+        # It's not strictly minimal, but it seems reasonable for now for it to
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_missing])),
@@ -1623,7 +1623,7 @@
             self.assertEqual(record.sha1, result[1])
             self.assertEqual(record.storage_kind, result[2])
             self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
-        # Its not strictly minimal, but it seems reasonable for now for it to
+        # It's not strictly minimal, but it seems reasonable for now for it to
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
@@ -1655,7 +1655,7 @@
 
     def test_insert_record_stream(self):
         # records are inserted as normal; insert_record_stream builds on
-        # add_lines, so a smoke test should be all thats needed:
+        # add_lines, so a smoke test should be all that's needed:
         key = ('foo',)
         key_basis = ('bar',)
         key_delta = ('zaphod',)
@@ -1720,7 +1720,7 @@
 
     def test_add_mpdiffs(self):
         # records are inserted as normal; add_mpdiff builds on
-        # add_lines, so a smoke test should be all thats needed:
+        # add_lines, so a smoke test should be all that's needed:
         key = ('foo',)
         key_basis = ('bar',)
         key_delta = ('zaphod',)
@@ -1767,7 +1767,8 @@
             ("get_parent_map", set([key_left, key_right])),
             ],
             basis.calls[:3])
-        self.assertEqual(set([key_left, key_right]), set(basis.calls[3][1]))
-        self.assertEqual('get_record_stream', basis.calls[3][0])
-        self.assertEqual('unordered', basis.calls[3][2])
-        self.assertEqual(True, basis.calls[3][3])
+        last_call = basis.calls[3]
+        self.assertEqual('get_record_stream', last_call[0])
+        self.assertEqual(set([key_left, key_right]), set(last_call[1]))
+        self.assertEqual('unordered', last_call[2])
+        self.assertEqual(True, last_call[3])
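
The last hunk is a readability tweak: pulling basis.calls[3] into a local so
the call name is asserted before its arguments. The pattern behind these
assertions is a recording fallback whose calls are captured as tuples; a
self-contained toy version (not the real test fixture) looks like:

    class CallRecorder(object):
        """Toy recorder standing in for the tests' recording basis."""

        def __init__(self):
            self.calls = []

        def get_record_stream(self, keys, ordering, include_delta_closure):
            self.calls.append(
                ('get_record_stream', keys, ordering, include_delta_closure))
            return iter([])

    basis = CallRecorder()
    list(basis.get_record_stream(
        set([('left',), ('right',)]), 'unordered', True))
    last_call = basis.calls[-1]
    assert last_call[0] == 'get_record_stream'
    assert set(last_call[1]) == set([('left',), ('right',)])
    assert last_call[2] == 'unordered'
    assert last_call[3] is True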

=== modified file 'bzrlib/versionedfile.py'
--- a/bzrlib/versionedfile.py	2008-07-01 03:38:35 +0000
+++ b/bzrlib/versionedfile.py	2008-07-01 03:49:54 +0000
@@ -551,7 +551,7 @@
 
     def iter_lines_added_or_present_in_keys(self, keys, pb=None):
         self.calls.append(("iter_lines_added_or_present_in_keys", copy(keys)))
-        return self._backing_vf.iter_lines_added_or_present_in_keys(keys)
+        return self._backing_vf.iter_lines_added_or_present_in_keys(keys, pb=pb)
 
     def keys(self):
         self.calls.append(("keys",))
@@ -838,7 +838,7 @@
 
         :param keys: The names of the keys to lookup
         :return: a dict from key to sha1 digest. Keys of texts which are not
-            present in the store are not not present in the returned
+            present in the store are not present in the returned
             dictionary.
         """
         raise NotImplementedError(self.get_sha1s)
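
The versionedfile.py change fixes a classic delegation slip: the recording
wrapper accepted pb but dropped it when forwarding to the backing versioned
files, so a progress bar passed by the caller never reached the real
implementation. A minimal sketch of the corrected pattern, with an
illustrative class name standing in for the real recording decorator:

    from copy import copy

    class RecordingWrapperExample(object):
        """Illustrative wrapper that records calls, then delegates."""

        def __init__(self, backing_vf):
            self.calls = []
            self._backing_vf = backing_vf

        def iter_lines_added_or_present_in_keys(self, keys, pb=None):
            self.calls.append(
                ("iter_lines_added_or_present_in_keys", copy(keys)))
            # Forward every argument the caller supplied; silently
            # dropping pb here is the bug the hunk above fixes.
            return self._backing_vf.iter_lines_added_or_present_in_keys(
                keys, pb=pb)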



