Rev 3902: Get rid of the 'delta' flag to GroupCompressor. It didn't do anything anyway. in http://bzr.arbash-meinel.com/branches/bzr/brisbane/lazy_gc_stream

John Arbash Meinel john at arbash-meinel.com
Tue Mar 17 16:12:39 GMT 2009


At http://bzr.arbash-meinel.com/branches/bzr/brisbane/lazy_gc_stream

------------------------------------------------------------
revno: 3902
revision-id: john at arbash-meinel.com-20090317161231-nzb4dk8t35ucw84u
parent: john at arbash-meinel.com-20090317051139-6za5bo6i7atmpvqr
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: lazy_gc_stream
timestamp: Tue 2009-03-17 11:12:31 -0500
message:
  Get rid of the 'delta' flag to GroupCompressor. It didn't do anything anyway.
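
  For illustration, a minimal before/after sketch of the call sites changed
  in the patch below; the 'delta' argument was accepted but never used:

    # before: the flag was silently ignored
    compressor = groupcompress.GroupCompressor(True)
    # after: the constructor takes no arguments
    compressor = groupcompress.GroupCompressor()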
-------------- next part --------------
=== modified file 'bzrlib/groupcompress.py'
--- a/bzrlib/groupcompress.py	2009-03-17 05:11:39 +0000
+++ b/bzrlib/groupcompress.py	2009-03-17 16:12:31 +0000
@@ -674,11 +674,8 @@
        left side.
     """
 
-    def __init__(self, delta=True):
-        """Create a GroupCompressor.
-
-        :param delta: If False, do not compress records.
-        """
+    def __init__(self):
+        """Create a GroupCompressor."""
         # Consider seeding the lines with some sort of GC Start flag, or
         # putting it as part of the output stream, rather than in the
         # compressed bytes.
@@ -1322,7 +1319,7 @@
                 return adapter
         # This will go up to fulltexts for gc to gc fetching, which isn't
         # ideal.
-        self._compressor = GroupCompressor(self._delta)
+        self._compressor = GroupCompressor()
         self._unadded_refs = {}
         keys_to_add = []
         basis_end = 0
@@ -1336,7 +1333,7 @@
             self._index.add_records(nodes, random_id=random_id)
             self._unadded_refs = {}
             del keys_to_add[:]
-            self._compressor = GroupCompressor(self._delta)
+            self._compressor = GroupCompressor()
 
         last_prefix = None
         last_fulltext_len = None

=== modified file 'bzrlib/tests/test_groupcompress.py'
--- a/bzrlib/tests/test_groupcompress.py	2009-03-17 05:11:39 +0000
+++ b/bzrlib/tests/test_groupcompress.py	2009-03-17 16:12:31 +0000
@@ -35,12 +35,12 @@
     """Tests for GroupCompressor"""
 
     def test_empty_delta(self):
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         self.assertEqual([], compressor.lines)
 
     def test_one_nosha_delta(self):
         # diff against NULL
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         sha1, end_point, _, _ = compressor.compress(('label',),
             'strange\ncommon\n', None)
         self.assertEqual(sha_string('strange\ncommon\n'), sha1)
@@ -66,7 +66,7 @@
                              self._chunks_to_repr_lines(actual))
 
     def test_two_nosha_delta(self):
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
             'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.lines)
@@ -91,7 +91,7 @@
     def test_three_nosha_delta(self):
         # The first interesting test: make a change that should use lines from
         # both parents.
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         sha1_1, end_point, _, _ = compressor.compress(('label',),
             'strange\ncommon very very long line\nwith some extra text\n', None)
         sha1_2, _, _, _ = compressor.compress(('newlabel',),
@@ -121,7 +121,7 @@
         self.assertEqual(sum(map(len, expected_lines)), end_point)
 
     def test_stats(self):
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         compressor.compress(('label',), 'strange\ncommon long line\n'
                                         'plus more text\n', None)
         compressor.compress(('newlabel',),
@@ -135,7 +135,7 @@
     def test_extract_from_compressor(self):
         # Knit fetching will try to reconstruct texts locally which results in
         # reading something that is in the compressor stream already.
-        compressor = groupcompress.GroupCompressor(True)
+        compressor = groupcompress.GroupCompressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
             'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.lines)


