Rev 3810: Finish using the page cache as part of _check_remap, add debugging functions in http://bzr.arbash-meinel.com/branches/bzr/brisbane/xml_cache

John Arbash Meinel john at arbash-meinel.com
Tue Dec 9 06:39:47 GMT 2008


At http://bzr.arbash-meinel.com/branches/bzr/brisbane/xml_cache

------------------------------------------------------------
revno: 3810
revision-id: john at arbash-meinel.com-20081209063920-uc32b8850x0vftty
parent: john at arbash-meinel.com-20081209061005-fuyy6qnfnbyjniuo
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: xml_cache
timestamp: Tue 2008-12-09 00:39:20 -0600
message:
  Finish using the page cache as part of _check_remap, add debugging functions
  to give a count of what happens with _check_remap()
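
A note on the "debugging functions": they are empty nested defs that are defined
and immediately called on each exit path of _check_remap(), so a profiler such as
lsprof reports a call count per outcome without adding any logging to the hot loop.
Here is a minimal, hypothetical sketch of the same trick using the standard-library
profiler; the toy check_remap() below is not bzrlib code:

    import cProfile
    import pstats

    def check_remap(children):
        """Toy stand-in for a _check_remap()-style decision: can this
        internal node collapse into a single leaf?"""
        for child in children:
            if isinstance(child, dict):  # pretend a dict is an InternalNode
                # Empty marker, defined and called in place: its call count
                # in the profiler output records how often this path fired.
                def child_is_internal_node(): pass
                child_is_internal_node()
                return 'keep internal node'
        def check_remap_collapsed(): pass
        check_remap_collapsed()
        return 'collapse into leaf'

    profiler = cProfile.Profile()
    profiler.enable()
    for children in ([1, 2, 3], [1, {'a': 1}], [4], [{'b': 2}]):
        check_remap(children)
    profiler.disable()
    # ncalls for the marker functions gives the per-outcome counts.
    pstats.Stats(profiler).print_stats('child_is_internal|check_remap_collapsed')

Because the markers do nothing, the cost is one extra function definition and call
per return path, which is cheap enough to leave in place while investigating.
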
-------------- next part --------------
=== modified file 'bzrlib/chk_map.py'
--- a/bzrlib/chk_map.py	2008-12-09 06:10:05 +0000
+++ b/bzrlib/chk_map.py	2008-12-09 06:39:20 +0000
@@ -43,11 +43,12 @@
 lazy_import.lazy_import(globals(), """
 from bzrlib import versionedfile
 """)
-from bzrlib.lru_cache import LRUCache
+from bzrlib import lru_cache
 
 # approx 2MB
-_PAGE_CACHE_SIZE = 2*1024*1024 / 4*1024
-_page_cache = LRUCache(_PAGE_CACHE_SIZE)
+_PAGE_CACHE_SIZE = 2*1024*1024
+# We are caching bytes so len(value) is perfectly accurate
+_page_cache = lru_cache.LRUSizeCache(_PAGE_CACHE_SIZE)
 
 
 class CHKMap(object):
@@ -970,11 +971,15 @@
             else:
                 if isinstance(node, InternalNode):
                     # Without looking at any leaf nodes, we are sure
+                    def child_is_internal_node(): pass
+                    child_is_internal_node()
                     return self
                 for key, value in node._items.iteritems():
                     if new_leaf._map_no_split(key, value):
                         # Adding this key would cause a split, so we know we
                         # don't need to collapse
+                        def child_causes_split(): pass
+                        child_causes_split()
                         return self
         if keys:
             # Look in the page cache for some more bytes
@@ -989,9 +994,13 @@
                     self._items[prefix] = node
                     if isinstance(node, InternalNode):
                         # We have done enough to know that we can stop
+                        def page_is_internal(): pass
+                        page_is_internal()
                         return self
                     for key, value in node._items.iteritems():
                         if new_leaf._map_no_split(key, value):
+                            def page_causes_split(): pass
+                            page_causes_split()
                             return self
             for chk in found_keys:
                 del keys[chk]
@@ -1013,19 +1022,27 @@
             # don't need to continue. We requested the bytes, we may as well
             # use them
             for record in stream:
-                node = _deserialise(record.get_bytes_as('fulltext'), record.key)
+                bytes = record.get_bytes_as('fulltext')
+                node = _deserialise(bytes, record.key)
                 self._items[keys[record.key]] = node
+                _page_cache[record.key] = bytes
                 nodes.append(node)
             for node in nodes:
                 if isinstance(node, InternalNode):
                     # We know we won't fit
+                    def stream_is_internal(): pass
+                    stream_is_internal()
                     return self
                 for key, value in node._items.iteritems():
                     if new_leaf._map_no_split(key, value):
+                        def stream_causes_split(): pass
+                        stream_causes_split()
                         return self
 
         # We have gone to every child, and everything fits in a single leaf
         # node, we no longer need this internal node
+        def check_remap_collapsed(): pass
+        check_remap_collapsed()
         return new_leaf
 
 

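For context on the page cache itself: the first hunk replaces a count-based LRUCache
with lru_cache.LRUSizeCache, bounded by the total length of the cached byte strings.
(The removed limit, 2*1024*1024 / 4*1024, evaluates to 536870912 because the division
happens before the final *1024; presumably about 512 pages was intended, and a
byte-sized bound avoids that kind of sizing mistake.) The later hunks then consult the
cache before asking the repository for records, and store any freshly fetched bytes
back into it. Below is a rough, standalone sketch of both ideas; it is not bzrlib's
actual LRUSizeCache API, and fetch_from_repo is a hypothetical stand-in for a
get_record_stream() call:

    from collections import OrderedDict

    class SizeLRUCache(object):
        """Illustrative LRU cache whose eviction is driven by the total
        len() of the cached byte strings, not by entry count."""

        def __init__(self, max_size):
            self._max_size = max_size
            self._size = 0
            self._items = OrderedDict()

        def __getitem__(self, key):
            value = self._items.pop(key)   # raises KeyError on a miss
            self._items[key] = value       # re-insert as most recently used
            return value

        def __setitem__(self, key, value):
            if key in self._items:
                self._size -= len(self._items.pop(key))
            self._items[key] = value
            self._size += len(value)
            while self._size > self._max_size:
                _, evicted = self._items.popitem(last=False)  # evict oldest
                self._size -= len(evicted)

    _page_cache = SizeLRUCache(2 * 1024 * 1024)  # approx 2MB of page bytes

    def get_page_bytes(key, fetch_from_repo):
        """Cache-first lookup: only call fetch_from_repo() on a miss,
        and remember what it returned for the next caller."""
        try:
            return _page_cache[key]
        except KeyError:
            data = fetch_from_repo(key)
            _page_cache[key] = data
            return data
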

