Rev 3810: when splitting a readv, we need to start at the last offset, in http://bzr.arbash-meinel.com/branches/bzr/1.9-dev/remote_readv_sections
John Arbash Meinel
john at arbash-meinel.com
Wed Oct 29 19:23:58 GMT 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.9-dev/remote_readv_sections
------------------------------------------------------------
revno: 3810
revision-id: john at arbash-meinel.com-20081029192344-inemp5hyyi61n39h
parent: john at arbash-meinel.com-20081029185313-ftm8kk89jhg452tk
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: remote_readv_sections
timestamp: Wed 2008-10-29 14:23:44 -0500
message:
when splitting a readv into multiple requests, we need to resume at the last offset;
we were accidentally calling .next() twice without yielding the result in between.
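
To make the failure concrete, here is a minimal standalone sketch of the old
behaviour (Python 2, as in bzrlib; the names handle_buggy and the sample
offsets are illustrative, not bzrlib's API). When a readv is split across two
requests and each per-request handler pulls its own starting offset from the
shared iterator, the offset fetched at the end of the first call is consumed
again at the start of the second call, and its data is never yielded:

offsets = iter([(0, 10), (10, 10), (20, 10)])

def handle_buggy(results, offset_iter):
    # BUG: every call fetches a fresh offset here, so the offset the
    # previous call had already fetched (but not yet matched) is skipped.
    cur = offset_iter.next()
    for key, data in results:
        if key == cur:
            yield key, data
            cur = offset_iter.next()  # StopIteration just ends the generator
        # (bzrlib stashes non-matching data in data_map; omitted here)

# The readv was split into two requests:
print list(handle_buggy([((0, 10), 'a' * 10)], offsets))
# [((0, 10), 'aaaaaaaaaa')]
print list(handle_buggy([((10, 10), 'b' * 10), ((20, 10), 'c' * 10)], offsets))
# [((20, 10), 'cccccccccc')]  -- the (10, 10) result is silently dropped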
-------------- next part --------------
=== modified file 'bzrlib/transport/remote.py'
--- a/bzrlib/transport/remote.py 2008-10-29 18:53:13 +0000
+++ b/bzrlib/transport/remote.py 2008-10-29 19:23:44 +0000
@@ -331,16 +331,19 @@
                 continue
             cur_request.append(c)
             cur_len += c.length
+        if cur_request:
+            requests.append(cur_request)
         if 'hpss' in debug.debug_flags:
-            trace.mutter('%s.readv %s offsets => %s coalesced => %s requests',
+            trace.mutter('%s.readv %s offsets => %s coalesced'
+                         ' => %s requests (%s)',
                          self.__class__.__name__, len(offsets), len(coalesced),
-                         len(requests))
-        if cur_request:
-            requests.append(cur_request)
+                         len(requests), sum(map(len, requests)))
         # Cache the results, but only until they have been fulfilled
         data_map = {}
-        # turn the list of offsets into a stack
+        # turn the list of offsets into a single stack to iterate
         offset_stack = iter(offsets)
+        # using a list so it can be modified when passing down and coming back
+        next_offset = [offset_stack.next()]
         for cur_request in requests:
             try:
                 result = self._client.call_with_body_readv_array(
@@ -357,12 +360,13 @@
 
             for res in self._handle_response(offset_stack, cur_request,
                                              response_handler,
-                                             data_map):
+                                             data_map,
+                                             next_offset):
                 yield res
 
     def _handle_response(self, offset_stack, coalesced, response_handler,
-                         data_map):
-        cur_offset_and_size = offset_stack.next()
+                         data_map, next_offset):
+        cur_offset_and_size = next_offset[0]
         # FIXME: this should know how many bytes are needed, for clarity.
         data = response_handler.read_body_bytes()
         data_offset = 0
@@ -383,7 +387,7 @@
                 #       not have a real string.
                 if key == cur_offset_and_size:
                     yield cur_offset_and_size[0], this_data
-                    cur_offset_and_size = offset_stack.next()
+                    cur_offset_and_size = next_offset[0] = offset_stack.next()
                 else:
                     data_map[key] = this_data
             data_offset += c_offset.length
@@ -392,7 +396,7 @@
             while cur_offset_and_size in data_map:
                 this_data = data_map.pop(cur_offset_and_size)
                 yield cur_offset_and_size[0], this_data
-                cur_offset_and_size = offset_stack.next()
+                cur_offset_and_size = next_offset[0] = offset_stack.next()
 
     def rename(self, rel_from, rel_to):
         self._call('rename',
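
The fix, reduced to the same standalone sketch (again hypothetical names,
Python 2): fetch the first offset exactly once in the caller and share the
"next expected offset" between handler calls through a one-element list, so a
later request resumes where the previous one stopped instead of consuming
another offset.

offsets = iter([(0, 10), (10, 10), (20, 10)])
# seed once; the single-element list lets the handler update it in place
next_offset = [offsets.next()]

def handle_fixed(results, offset_iter, next_offset):
    cur = next_offset[0]  # resume at the last offset reached
    for key, data in results:
        if key == cur:
            yield key, data
            cur = next_offset[0] = offset_iter.next()

print list(handle_fixed([((0, 10), 'a' * 10)], offsets, next_offset))
# [((0, 10), 'aaaaaaaaaa')]
print list(handle_fixed([((10, 10), 'b' * 10), ((20, 10), 'c' * 10)],
                        offsets, next_offset))
# [((10, 10), 'bbbbbbbbbb'), ((20, 10), 'cccccccccc')]

The one-element list is a small Python 2 idiom for sharing a single rebindable
value between the outer readv loop and each handler invocation (Python 2 has
no nonlocal); that is what the "using a list so it can be modified when
passing down and coming back" comment in the diff refers to.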