Rev 22: Ensure compatibility up to bzr-1.6.1 (at least). in file:///net/bigmamac/Volumes/home/vila/.bazaar/plugins/fix277537/

Vincent Ladeuil v.ladeuil+lp at free.fr
Tue Feb 3 20:22:14 GMT 2009


At file:///net/bigmamac/Volumes/home/vila/.bazaar/plugins/fix277537/

------------------------------------------------------------
revno: 22
revision-id: v.ladeuil+lp at free.fr-20090203202214-6bt37vi9y2pl2g2d
parent: v.ladeuil+lp at free.fr-20090202165822-z7odfgkrfyc3y80b
committer: Vincent Ladeuil <v.ladeuil+lp at free.fr>
branch nick: fix277537
timestamp: Tue 2009-02-03 21:22:14 +0100
message:
  Ensure compatibility up to bzr-1.6.1 (at least).
  
  * reconcile.py:
  (InventoryAncestryReconcilePacker._copy_fixed_nodes): Ensure
  compatibility with old bzr versions (as far back as 1.6.1) by using
  either the 'chunked' or 'fulltext' storage kind. Fix another edge
  case in the batch size calculation.
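
A minimal sketch of the fallback described above, assuming bzrlib's osutils
and the records returned by get_record_stream(); text_as_lines is a
hypothetical helper added here only to show how convert_bytes is meant to be
used, it is not part of the patch:

from bzrlib import osutils

if getattr(osutils, 'chunks_to_lines', None) is None:
    # Old bzr (1.6.1 era): request full texts and split them ourselves.
    storage_kind = 'fulltext'
    convert_bytes = osutils.split_lines
else:
    # Recent bzr: 'chunked' avoids materializing one big string per text.
    storage_kind = 'chunked'
    convert_bytes = osutils.chunks_to_lines

def text_as_lines(record):
    # Hypothetical helper: turn a stream record into a list of lines.
    return convert_bytes(record.get_bytes_as(storage_kind))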
-------------- next part --------------
=== modified file 'reconcile.py'
--- a/reconcile.py	2009-02-02 16:58:22 +0000
+++ b/reconcile.py	2009-02-03 20:22:14 +0000
@@ -172,6 +172,23 @@
                 add_callback=self.new_pack.text_index.add_nodes,
                 deltas=True, parents=True, is_locked=repo.is_locked),
             data_access=data_access, max_delta_chain=200)
+        # The 'chunked' storage kind allows better optimizations but requires
+        # a recent bzr version; to ease plugin deployment we want to relax
+        # that requirement.
+        if getattr(osutils, 'chunks_to_lines', None) is None:
+            storage_kind = 'fulltext'
+            convert_bytes = osutils.split_lines
+            # It seems that no matter how small we choose the following values,
+            # we just can't reduce memory consumption... near 1.4GB :-/
+            # -- vila 20090203
+            min_batch_size = 16
+            max_batch_size = 128
+        else:
+            storage_kind = 'chunked'
+            convert_bytes = osutils.chunks_to_lines
+            min_batch_size = 16
+            max_batch_size = 128
+
         pb = ui.ui_factory.nested_progress_bar()
         try:
             num_texts = len(fixed_nodes)
@@ -196,8 +213,10 @@
             while remaining_nodes:
                 next_key = remaining_nodes.pop()
                 if (file_id is None
-                    or len(next_batch) < 10
-                    or (len(next_batch) < 100 and file_id == next_key[0])):
+                    or not remaining_nodes
+                    or len(next_batch) < min_batch_size
+                    or (len(next_batch) <= max_batch_size
+                        and file_id == next_key[0])):
                     next_batch.append(next_key)
                     file_id = next_key[0]
                     if remaining_nodes:
@@ -208,7 +227,7 @@
                 chunks = {}
                 for record in repo.texts.get_record_stream(this_batch,
                                                            'unordered', True):
-                    chunks[record.key] = record.get_bytes_as('chunked')
+                    chunks[record.key] = record.get_bytes_as(storage_kind)
 
                 for key in this_batch:
                     if parent_file_id is None or parent_file_id != key[0]:
@@ -222,7 +241,7 @@
                     self.new_pack.flush()
                     pb.update('Copying fulltext', count, num_texts)
                     ideal_parents = fixed_nodes[key]
-                    lines = osutils.chunks_to_lines(chunks.pop(key))
+                    lines = convert_bytes(chunks.pop(key))
                     try:
                         _, _, content = output_texts.add_lines(key,
                             ideal_parents, lines, random_id=True,


