Rev 3807: Hack in some other code, so we can determine how much compression we get. in http://bzr.arbash-meinel.com/branches/bzr/brisbane/chk_map
John Arbash Meinel
john at arbash-meinel.com
Wed Dec 3 04:32:44 GMT 2008
At http://bzr.arbash-meinel.com/branches/bzr/brisbane/chk_map
------------------------------------------------------------
revno: 3807
revision-id: john at arbash-meinel.com-20081203043230-f0riipwg6wkjr6ae
parent: john at arbash-meinel.com-20081203041138-ecssp5m0mqxjgzhu
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: chk_map
timestamp: Tue 2008-12-02 22:32:30 -0600
message:
Hack in some other code, so we can determine how much compression we get.
This just tracks the 'old size' of all the packs that are getting combined versus the
'new size' of the newly created pack file.
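For a rough sense of the reporting side only, the MB comparison behind the new
mutter line boils down to the following (a standalone sketch using plain byte
counts, not bzrlib API; describe_compression is a made-up helper name):

    def describe_compression(old_bytes, new_bytes):
        # Follow the patch's convention: None means the transport could not
        # stat the pack files, and is reported as -1.
        old_mb = -1 if old_bytes is None else old_bytes / (1024.0 * 1024)
        new_mb = -1 if new_bytes is None else new_bytes / (1024.0 * 1024)
        return '%.3fMB => %.3fMB' % (old_mb, new_mb)

    # e.g. describe_compression(10 * 1024 * 1024, 4 * 1024 * 1024)
    # -> '10.000MB => 4.000MB'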
-------------- next part --------------
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py 2008-12-03 04:11:38 +0000
+++ b/bzrlib/repofmt/pack_repo.py 2008-12-03 04:32:30 +0000
@@ -1304,7 +1304,8 @@
# XXX: the following may want to be a class, to pack with a given
# policy.
mutter('Auto-packing repository %s, which has %d pack files, '
- 'containing %d revisions into no more than %d packs.', self,
+ 'containing %d revisions into no more than %d packs.',
+ self.transport.base,
total_packs, total_revisions,
self._max_pack_count(total_revisions))
# determine which packs need changing
@@ -1327,8 +1328,17 @@
existing_packs.append((revision_count, pack))
pack_operations = self.plan_autopack_combinations(
existing_packs, pack_distribution)
- self._execute_pack_operations(pack_operations)
- mutter('Auto-packing repository %s completed', self)
+ old_size, new_size = self._execute_pack_operations(pack_operations)
+ if old_size is None:
+ old_size = -1
+ else:
+ old_size /= (1024.0*1024)
+ if new_size is None:
+ new_size = -1
+ else:
+ new_size /= (1024.0*1024)
+ mutter('Auto-packing repository %s completed %.3fMB => %.3fMB',
+ self.transport.base, old_size, new_size)
return True
def _execute_pack_operations(self, pack_operations, _packer_class=Packer):
@@ -1338,19 +1348,32 @@
:param _packer_class: The class of packer to use (default: Packer).
- :return: None.
+ :return: A tuple (old_size, new_size) in bytes, or None values if the pack sizes could not be determined.
"""
+ new_size = 0
for revision_count, packs in pack_operations:
# we may have no-ops from the setup logic
if len(packs) == 0:
continue
- _packer_class(self, packs, '.autopack').pack()
+ new_pack = _packer_class(self, packs, '.autopack').pack()
+ try:
+ new_size += new_pack.pack_transport.stat(new_pack.name + '.pack').st_size
+ except errors.TransportNotPossible:
+ new_size = None
for pack in packs:
self._remove_pack_from_memory(pack)
# record the newly available packs and stop advertising the old
# packs
+ if new_size is None:
+ old_size = None
+ else:
+ old_size = 0
+ for revision_count, packs in pack_operations:
+ for a_pack in packs:
+ old_size += a_pack.pack_transport.stat(a_pack.name + '.pack').st_size
self._save_pack_names(clear_obsolete_packs=True)
# Move the old packs out of the way now they are no longer referenced.
for revision_count, packs in pack_operations:
self._obsolete_packs(packs)
+ return old_size, new_size
def lock_names(self):
"""Acquire the mutex around the pack-names index.