Rev 75: Fix up some failing tests, in http://bzr.arbash-meinel.com/plugins/groupcompress_rabin
John Arbash Meinel
john at arbash-meinel.com
Mon Mar 2 19:43:41 GMT 2009
At http://bzr.arbash-meinel.com/plugins/groupcompress_rabin
------------------------------------------------------------
revno: 75
revision-id: john at arbash-meinel.com-20090302194337-f0x1quasnm4p7x9m
parent: john at arbash-meinel.com-20090302193629-51hqsvh1rhh71gku
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: groupcompress_rabin
timestamp: Mon 2009-03-02 13:43:37 -0600
message:
Fix up some failing tests.
-------------- next part --------------
=== modified file 'groupcompress.py'
--- a/groupcompress.py 2009-03-02 19:36:29 +0000
+++ b/groupcompress.py 2009-03-02 19:43:37 +0000
@@ -62,14 +62,14 @@
(action, label_line, sha1_line, len_line,
delta_bytes) = bytes.split('\n', 4)
if (action not in ('fulltext', 'delta')
- or not label_line.startswith('label: ')
- or not sha1_line.startswith('sha1: ')
- or not len_line.startswith('len: ')
+ or not label_line.startswith('label:')
+ or not sha1_line.startswith('sha1:')
+ or not len_line.startswith('len:')
):
raise AssertionError("bad text record %r" % (bytes,))
- label = tuple(label_line[7:].split('\x00'))
- sha1 = sha1_line[6:]
- length = int(len_line[5:])
+ label = tuple(label_line[6:].split('\x00'))
+ sha1 = sha1_line[5:]
+ length = int(len_line[4:])
if not len(delta_bytes) == length:
raise AssertionError("bad length record %r" % (bytes,))
return action, label, sha1, delta_bytes
@@ -166,7 +166,7 @@
if _NO_LABELS:
new_chunks = []
else:
- new_chunks = ['label: %s\nsha1: %s\n' % (label, sha1)]
+ new_chunks = ['label:%s\nsha1:%s\n' % (label, sha1)]
delta = self._delta_index.make_delta(target_text)
if (delta is None
or len(delta) > len(target_text) / 2):
@@ -176,7 +176,7 @@
new_chunks = ['f']
else:
new_chunks.insert(0, 'fulltext\n')
- new_chunks.append('len: %s\n' % (input_len,))
+ new_chunks.append('len:%s\n' % (input_len,))
unadded_bytes = sum(map(len, new_chunks))
self._delta_index.add_source(target_text, unadded_bytes)
new_chunks.append(target_text)
@@ -185,7 +185,7 @@
new_chunks = ['d']
else:
new_chunks.insert(0, 'delta\n')
- new_chunks.append('len: %s\n' % (len(delta),))
+ new_chunks.append('len:%s\n' % (len(delta),))
unadded_bytes = sum(map(len, new_chunks))
new_chunks.append(delta)
delta_start = (self.endpoint, len(self.lines))
=== modified file 'tests/test_groupcompress.py'
--- a/tests/test_groupcompress.py 2009-02-27 19:54:27 +0000
+++ b/tests/test_groupcompress.py 2009-03-02 19:43:37 +0000
@@ -63,13 +63,10 @@
['strange\n', 'common\n'], None)
self.assertEqual(sha_strings(['strange\n', 'common\n']), sha1)
expected_lines = [
- 'label: label\n',
- 'sha1: %s\n' % sha1,
- 'i,3\n',
- 'strange\n',
- 'common\n',
- '\n', # the last \n in a text is removed, which allows safe
- # serialisation of lines without trailing \n.
+ 'fulltext\n',
+ 'label:label\nsha1:%s\n' % sha1,
+ 'len:15\n',
+ 'strange\ncommon\n',
]
self.assertEqual(expected_lines, compressor.lines)
self.assertEqual(sum(map(len, expected_lines)), end_point)
@@ -77,22 +74,22 @@
def test_two_nosha_delta(self):
compressor = groupcompress.GroupCompressor(True)
sha1_1, _ = compressor.compress(('label',),
- ['strange\n', 'common long line\n'], None)
+ ['strange\n', 'common very very very long line\n'], None)
expected_lines = list(compressor.lines)
sha1_2, end_point = compressor.compress(('newlabel',),
- ['common long line\n', 'different\n'], None)
- self.assertEqual(sha_strings(['common long line\n', 'different\n']),
- sha1_2)
+ ['common very very very long line\n', 'different\n'], None)
+ self.assertEqual(sha_strings(['common very very very long line\n',
+ 'different\n']), sha1_2)
expected_lines.extend([
'delta\n'
- 'label: newlabel\n',
- 'sha1: %s\n' % sha1_2,
+ 'label:newlabel\n',
+ 'sha1:%s\n' % sha1_2,
+ 'len:16\n',
# copy the line common
'c,72,17\n',
# add the line different, and the trailing newline
'i,2\n',
'different\n',
- '\n'
])
self.assertEqualDiff(''.join(expected_lines), ''.join(compressor.lines))
self.assertEqual(sum(map(len, expected_lines)), end_point)
@@ -114,8 +111,10 @@
'moredifferent\n']),
sha1_3)
expected_lines.extend([
- 'label: label3\n',
- 'sha1: %s\n' % sha1_3,
+ 'delta\n',
+ 'label:label3\n',
+ 'sha1:%s\n' % sha1_3,
+ 'len:11\n',
# insert new
'i,1\n',
'new\n',
@@ -147,8 +146,8 @@
sha_2, _ = compressor.compress(('newlabel',),
['common\n', 'different\n', 'moredifferent\n'], None)
# get the first out
- self.assertEqual((['strange\n', 'common\n'], sha_1),
+ self.assertEqual((['strange\ncommon\n'], sha_1),
compressor.extract(('label',)))
# and the second
- self.assertEqual((['common\n', 'different\n', 'moredifferent\n'],
+ self.assertEqual((['common\ndifferent\nmoredifferent\n'],
sha_2), compressor.extract(('newlabel',)))
More information about the bazaar-commits
mailing list