Rev 2963: Factor out inventory text copying in Packer to a single helper method. in http://people.ubuntu.com/~robertc/baz2.0/reconcile
Robert Collins
robertc at robertcollins.net
Sun Nov 4 20:48:32 GMT 2007
At http://people.ubuntu.com/~robertc/baz2.0/reconcile
------------------------------------------------------------
revno: 2963
revision-id:robertc at robertcollins.net-20071104204820-8a8tz6g8kiu1in4h
parent: robertc at robertcollins.net-20071104204151-amehfv5prpvhn92c
committer: Robert Collins <robertc at robertcollins.net>
branch nick: reconcile
timestamp: Mon 2007-11-05 07:48:20 +1100
message:
Factor out inventory text copying in Packer to a single helper method.
modified:
bzrlib/repofmt/pack_repo.py pack_repo.py-20070813041115-gjv5ma7ktfqwsjgn-1
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py 2007-11-04 20:41:51 +0000
+++ b/bzrlib/repofmt/pack_repo.py 2007-11-04 20:48:20 +0000
@@ -496,6 +496,9 @@
         # The index layer keys for the revisions being copied. None for 'all
         # objects'.
         self._revision_keys = None
+        # What text keys to copy. None for 'all texts'. This is set by
+        # _copy_inventory_texts
+        self._text_filter = None
 
     def pack(self, pb=None):
         """Create a new pack by reading data from other packs.
@@ -563,25 +566,13 @@
                 time.time() - self.new_pack.start_time)
         self._revision_keys = revision_keys
 
-    def _create_pack_from_packs(self):
-        self.pb.update("Opening pack", 0, 5)
-        self.new_pack = self.open_pack()
-        new_pack = self.new_pack
-        # buffer data - we won't be reading-back during the pack creation and
-        # this makes a significant difference on sftp pushes.
-        new_pack.set_write_cache_size(1024*1024)
-        if 'pack' in debug.debug_flags:
-            plain_pack_list = ['%s%s' % (a_pack.pack_transport.base, a_pack.name)
-                for a_pack in self.packs]
-            if self.revision_ids is not None:
-                rev_count = len(self.revision_ids)
-            else:
-                rev_count = 'all'
-            mutter('%s: create_pack: creating pack from source packs: '
-                '%s%s %s revisions wanted %s t=0',
-                time.ctime(), self._pack_collection._upload_transport.base, new_pack.random_name,
-                plain_pack_list, rev_count)
-        self._copy_revision_texts()
+    def _copy_inventory_texts(self):
+        """Copy the inventory texts to the new pack.
+
+        self._revision_keys is used to determine what inventories to copy.
+
+        Sets self._text_filter appropriately.
+        """
         # select inventory keys
         inv_keys = self._revision_keys # currently the same keyspace, and note that
         # querying for keys here could introduce a bug where an inventory item
@@ -595,7 +586,7 @@
         # at this point.
         self.pb.update("Copying inventory texts", 2)
         inv_lines = self._copy_nodes_graph(inv_nodes, inventory_index_map,
-            new_pack._writer, new_pack.inventory_index, output_lines=True)
+            self.new_pack._writer, self.new_pack.inventory_index, output_lines=True)
         if self.revision_ids:
             fileid_revisions = self._pack_collection.repo._find_file_ids_from_xml_inventory_lines(
                 inv_lines, self.revision_ids)
@@ -609,14 +600,38 @@
             text_filter = None
         if 'pack' in debug.debug_flags:
             mutter('%s: create_pack: inventories copied: %s%s %d items t+%6.3fs',
+                time.ctime(), self._pack_collection._upload_transport.base,
+                self.new_pack.random_name,
+                self.new_pack.inventory_index.key_count(),
+                time.time() - self.new_pack.start_time)
+        self._text_filter = text_filter
+
+    def _create_pack_from_packs(self):
+        self.pb.update("Opening pack", 0, 5)
+        self.new_pack = self.open_pack()
+        new_pack = self.new_pack
+        # buffer data - we won't be reading-back during the pack creation and
+        # this makes a significant difference on sftp pushes.
+        new_pack.set_write_cache_size(1024*1024)
+        if 'pack' in debug.debug_flags:
+            plain_pack_list = ['%s%s' % (a_pack.pack_transport.base, a_pack.name)
+                for a_pack in self.packs]
+            if self.revision_ids is not None:
+                rev_count = len(self.revision_ids)
+            else:
+                rev_count = 'all'
+            mutter('%s: create_pack: creating pack from source packs: '
+                '%s%s %s revisions wanted %s t=0',
                 time.ctime(), self._pack_collection._upload_transport.base, new_pack.random_name,
-                new_pack.inventory_index.key_count(),
-                time.time() - new_pack.start_time)
+                plain_pack_list, rev_count)
+        self._copy_revision_texts()
+        self._copy_inventory_texts()
         # select text keys
         text_index_map = self._pack_collection._packs_list_to_pack_map_and_index_list(
             self.packs, 'text_index')[0]
-        text_nodes = self._pack_collection._index_contents(text_index_map, text_filter)
-        if text_filter is not None:
+        text_nodes = self._pack_collection._index_contents(text_index_map,
+            self._text_filter)
+        if self._text_filter is not None:
             # We could return the keys copied as part of the return value from
             # _copy_nodes_graph but this doesn't work all that well with the
             # need to get line output too, so we check separately, and as we're
@@ -625,7 +640,7 @@
             # mising records.
             text_nodes = set(text_nodes)
             present_text_keys = set(_node[1] for _node in text_nodes)
-            missing_text_keys = set(text_filter) - present_text_keys
+            missing_text_keys = set(self._text_filter) - present_text_keys
            if missing_text_keys:
                # TODO: raise a specific error that can handle many missing
                # keys.
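
The change above is one step in decomposing _create_pack_from_packs into per-phase helpers (_copy_revision_texts was factored out earlier in this series, _copy_inventory_texts here), with the phases handing results to each other through instance attributes (self._revision_keys, self._text_filter) rather than local variables. As a rough illustration of that shape, here is a minimal, self-contained Python sketch; the class name and placeholder logic are invented for the example and are not bzrlib's actual Packer implementation:

class PhasedPacker(object):
    """Hypothetical, simplified stand-in for pack_repo.Packer.

    Each copy phase is a named helper; phases communicate through
    instance attributes instead of local variables, which is what lets
    a long orchestrating method be split up one phase at a time.
    """

    def __init__(self, revision_ids=None):
        # None means 'copy everything', mirroring the semantics the diff
        # documents for _revision_keys and _text_filter.
        self.revision_ids = revision_ids
        self._revision_keys = None
        self._text_filter = None

    def _copy_revision_texts(self):
        # Phase 1: record which revision keys were copied (placeholder logic).
        if self.revision_ids is not None:
            self._revision_keys = [('rev', rid) for rid in self.revision_ids]

    def _copy_inventory_texts(self):
        """Copy inventory texts; sets self._text_filter for the next phase."""
        # Phase 2: derive a text filter from the inventories just copied
        # (placeholder logic standing in for the XML inventory parsing).
        if self._revision_keys is not None:
            self._text_filter = [('file-id', rid) for _, rid in self._revision_keys]

    def create_pack(self):
        # The orchestrating method now reads as a sequence of named phases.
        self._copy_revision_texts()
        self._copy_inventory_texts()
        # Phase 3 would select text keys using self._text_filter;
        # None still means 'copy all texts'.
        return self._text_filter


if __name__ == '__main__':
    print(PhasedPacker(revision_ids=['rev-1', 'rev-2']).create_pack())
    print(PhasedPacker().create_pack())

The None-means-everything convention on the shared attributes keeps each phase independently extractable, at the cost of some hidden coupling between the helpers, which is presumably why the new docstring spells out exactly what _copy_inventory_texts reads (self._revision_keys) and sets (self._text_filter).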