Rev 3806: Change the default readv() packing rules. in http://bzr.arbash-meinel.com/branches/bzr/1.9-dev/remote_readv_sections
John Arbash Meinel
john at arbash-meinel.com
Wed Oct 29 18:02:52 GMT 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.9-dev/remote_readv_sections
------------------------------------------------------------
revno: 3806
revision-id: john at arbash-meinel.com-20081029180237-0ny6gh1punn3dcck
parent: pqm at pqm.ubuntu.com-20081028202057-u3csau9zvf0hapya
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: remote_readv_sections
timestamp: Wed 2008-10-29 13:02:37 -0500
message:
Change the default readv() packing rules.
-------------- next part --------------
=== modified file 'bzrlib/transport/__init__.py'
--- a/bzrlib/transport/__init__.py 2008-10-27 15:30:29 +0000
+++ b/bzrlib/transport/__init__.py 2008-10-29 18:02:37 +0000
@@ -713,6 +713,8 @@
offsets, in start-to-end order, with no duplicated regions,
expanded by the transports recommended page size.
"""
+ # never make a single request larger than 1MB
+ max_length = 1*1024*1024
offsets = sorted(offsets)
# short circuit empty requests
if len(offsets) == 0:
@@ -747,9 +749,12 @@
current_finish = current_length + current_offset
for offset, length in new_offsets[1:]:
finish = offset + length
- if offset > current_finish:
- # there is a gap, output the current accumulator and start
- # a new one for the region we're examining.
+ if (offset > current_finish
+ or (max_length and length + current_length > max_length)):
+ # there is a gap, or adding this section would create a range
+ # longer than max_length, either way, output the current
+ # accumulator and start a new one for the region we're
+ # examining.
offsets.append((current_offset, current_length))
current_offset = offset
current_length = length
=== modified file 'bzrlib/transport/remote.py'
--- a/bzrlib/transport/remote.py 2008-10-15 18:45:28 +0000
+++ b/bzrlib/transport/remote.py 2008-10-29 18:02:37 +0000
@@ -316,29 +316,50 @@
limit=self._max_readv_combine,
fudge_factor=self._bytes_to_read_before_seek))
- try:
- result = self._client.call_with_body_readv_array(
- ('readv', self._remote_path(relpath),),
- [(c.start, c.length) for c in coalesced])
- resp, response_handler = result
- except errors.ErrorFromSmartServer, err:
- self._translate_error(err.error_tuple)
-
- if resp[0] != 'readv':
- # This should raise an exception
- response_handler.cancel_read_body()
- raise errors.UnexpectedSmartServerResponse(resp)
-
- return self._handle_response(offsets, coalesced, response_handler)
-
- def _handle_response(self, offsets, coalesced, response_handler):
+ max_combined = 50*1024*1024
+ # now that we've coalesced things, try to avoid making enormous
+ # requests
+ requests = []
+ cur_request = []
+ cur_len = 0
+ for c in coalesced:
+ if c.length + cur_len > max_combined:
+ requests.append(cur_request)
+ cur_request = []
+ cur_len = 0
+ continue
+ cur_request.append(c)
+ cur_len += c.length
+ if cur_request:
+ requests.append(cur_request)
+ # Cache the results, but only until they have been fulfilled
+ data_map = {}
# turn the list of offsets into a stack
offset_stack = iter(offsets)
+ for cur_request in requests:
+ try:
+ result = self._client.call_with_body_readv_array(
+ ('readv', self._remote_path(relpath),),
+ [(c.start, c.length) for c in cur_request])
+ resp, response_handler = result
+ except errors.ErrorFromSmartServer, err:
+ self._translate_error(err.error_tuple)
+
+ if resp[0] != 'readv':
+ # This should raise an exception
+ response_handler.cancel_read_body()
+ raise errors.UnexpectedSmartServerResponse(resp)
+
+ for res in self._handle_response(offset_stack, cur_request,
+ response_handler,
+ data_map):
+ yield res
+
+ def _handle_response(self, offset_stack, coalesced, response_handler,
+ data_map):
cur_offset_and_size = offset_stack.next()
# FIXME: this should know how many bytes are needed, for clarity.
data = response_handler.read_body_bytes()
- # Cache the results, but only until they have been fulfilled
- data_map = {}
data_offset = 0
for c_offset in coalesced:
if len(data) < c_offset.length:
More information about the bazaar-commits
mailing list