Rev 4466: Teach commit_write_group to return hint information for pack(). in http://people.ubuntu.com/~robertc/baz2.0/pending/autopack-cross-format-fetch
Robert Collins
robertc at robertcollins.net
Mon Jun 22 03:25:35 BST 2009
At http://people.ubuntu.com/~robertc/baz2.0/pending/autopack-cross-format-fetch
------------------------------------------------------------
revno: 4466
revision-id: robertc at robertcollins.net-20090622022509-qn2rjozy7g1hsmpv
parent: robertc at robertcollins.net-20090621235117-zvjywxin20usblpn
committer: Robert Collins <robertc at robertcollins.net>
branch nick: autopack-cross-format-fetch
timestamp: Mon 2009-06-22 12:25:09 +1000
message:
Teach commit_write_group to return hint information for pack().
=== modified file 'NEWS'
--- a/NEWS 2009-06-21 23:51:17 +0000
+++ b/NEWS 2009-06-22 02:25:09 +0000
@@ -63,6 +63,10 @@
properly fetch the minimum number of texts for non-smart fetching.
(John Arbash Meinel)
+* ``Repository.commit_write_group`` now returns opaque data about what
+ was committed, for passing to ``Repository.pack``. Repositories
+ without atomic commits will still return None. (Robert Collins)
+
* ``Repository.pack`` now takes an optional ``hint`` parameter
which will support doing partial packs for repositories that can do
that. (Robert Collins)
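
For readers of the NEWS entry above, the intended caller pattern looks
roughly like the minimal sketch below. The setup and the 'example' path are
illustrative only and not part of this change; repositories without atomic
commits simply hand back None, in which case there is nothing to pass on to
pack().

    from bzrlib import bzrdir

    # Illustrative setup only: any writable repository object will do.
    tree = bzrdir.BzrDir.create_standalone_workingtree('example')
    repo = tree.branch.repository
    repo.lock_write()
    try:
        repo.start_write_group()
        try:
            pass  # add texts, inventories and revisions here
        except:
            repo.abort_write_group()
            raise
        else:
            hint = repo.commit_write_group()
            # None for repositories without atomic commits; otherwise opaque
            # data describing what was committed, suitable for pack().
            if hint is not None:
                repo.pack(hint=hint)
    finally:
        repo.unlock()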
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py 2009-06-21 23:51:17 +0000
+++ b/bzrlib/repofmt/pack_repo.py 2009-06-22 02:25:09 +0000
@@ -1459,12 +1459,12 @@
in synchronisation with certain steps. Otherwise the names collection
is not flushed.
- :return: True if packing took place.
+ :return: Something evaluating true if packing took place.
"""
while True:
try:
return self._do_autopack()
- except errors.RetryAutopack, e:
+ except errors.RetryAutopack:
# If we get a RetryAutopack exception, we should abort the
# current action, and retry.
pass
@@ -1474,7 +1474,7 @@
total_revisions = self.revision_index.combined_index.key_count()
total_packs = len(self._names)
if self._max_pack_count(total_revisions) >= total_packs:
- return False
+ return None
# determine which packs need changing
pack_distribution = self.pack_distribution(total_revisions)
existing_packs = []
@@ -1502,10 +1502,10 @@
'containing %d revisions. Packing %d files into %d affecting %d'
' revisions', self, total_packs, total_revisions, num_old_packs,
num_new_packs, num_revs_affected)
- self._execute_pack_operations(pack_operations,
+ result = self._execute_pack_operations(pack_operations,
reload_func=self._restart_autopack)
mutter('Auto-packing repository %s completed', self)
- return True
+ return result
def _execute_pack_operations(self, pack_operations, _packer_class=Packer,
reload_func=None):
@@ -1513,7 +1513,7 @@
:param pack_operations: A list of [revision_count, packs_to_combine].
:param _packer_class: The class of packer to use (default: Packer).
- :return: None.
+ :return: The new pack names.
"""
for revision_count, packs in pack_operations:
# we may have no-ops from the setup logic
@@ -1535,10 +1535,11 @@
self._remove_pack_from_memory(pack)
# record the newly available packs and stop advertising the old
# packs
- self._save_pack_names(clear_obsolete_packs=True)
+ result = self._save_pack_names(clear_obsolete_packs=True)
# Move the old packs out of the way now they are no longer referenced.
for revision_count, packs in pack_operations:
self._obsolete_packs(packs)
+ return result
def _flush_new_pack(self):
if self._new_pack is not None:
@@ -1938,6 +1939,7 @@
:param clear_obsolete_packs: If True, clear out the contents of the
obsolete_packs directory.
+ :return: A list of the names saved that were not previously on disk.
"""
self.lock_names()
try:
@@ -1958,6 +1960,7 @@
self._unlock_names()
# synchronise the memory packs list with what we just wrote:
self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+ return [new_node[0][0] for new_node in new_nodes]
def reload_pack_names(self):
"""Sync our pack listing with what is present in the repository.
@@ -2097,7 +2100,7 @@
if not self.autopack():
# when autopack takes no steps, the names list is still
# unsaved.
- self._save_pack_names()
+ return self._save_pack_names()
def _suspend_write_group(self):
tokens = [pack.name for pack in self._resumed_packs]
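
In the pack_repo.py changes above, the hint is simply the list of pack names
that _save_pack_names found to be newly on disk: the nodes it iterates are
(key, value) pairs whose key is a 1-tuple holding the pack name, which is why
the new return value uses new_node[0][0]. A standalone illustration follows;
the node below is made up for demonstration, not real index data.

    # Hypothetical disk node as stored in the pack-names index: the key is a
    # 1-tuple holding the pack name, the value encodes the index sizes.
    new_nodes = [(('a1b2c3d4e5f60718293a4b5c6d7e8f90',), '92 72 88 0')]
    print [new_node[0][0] for new_node in new_nodes]
    # -> ['a1b2c3d4e5f60718293a4b5c6d7e8f90']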
=== modified file 'bzrlib/repository.py'
--- a/bzrlib/repository.py 2009-06-21 23:51:17 +0000
+++ b/bzrlib/repository.py 2009-06-22 02:25:09 +0000
@@ -1413,8 +1413,9 @@
raise errors.BzrError('mismatched lock context %r and '
'write group %r.' %
(self.get_transaction(), self._write_group))
- self._commit_write_group()
+ result = self._commit_write_group()
self._write_group = None
+ return result
def _commit_write_group(self):
"""Template method for per-repository write group cleanup.
=== modified file 'bzrlib/tests/per_repository/test_write_group.py'
--- a/bzrlib/tests/per_repository/test_write_group.py 2009-06-10 03:56:49 +0000
+++ b/bzrlib/tests/per_repository/test_write_group.py 2009-06-22 02:25:09 +0000
@@ -68,11 +68,14 @@
repo.commit_write_group()
repo.unlock()
- def test_commit_write_group_gets_None(self):
+ def test_commit_write_group_does_not_error(self):
repo = self.make_repository('.')
repo.lock_write()
repo.start_write_group()
- self.assertEqual(None, repo.commit_write_group())
+ # commit_write_group can either return None (for repositories without
+ # isolated transactions) or a hint for pack(). So we only check it
+ # works in this interface test, because all repositories are exercised.
+ repo.commit_write_group()
repo.unlock()
def test_unlock_in_write_group(self):
=== modified file 'bzrlib/tests/test_pack_repository.py'
--- a/bzrlib/tests/test_pack_repository.py 2009-06-17 17:57:15 +0000
+++ b/bzrlib/tests/test_pack_repository.py 2009-06-22 02:25:09 +0000
@@ -238,6 +238,35 @@
pack_names = [node[1][0] for node in index.iter_all_entries()]
self.assertTrue(large_pack_name in pack_names)
+ def test_commit_write_group_returns_new_pack_names(self):
+ format = self.get_format()
+ tree = self.make_branch_and_tree('foo', format=format)
+ tree.commit('first post')
+ repo = tree.branch.repository
+ repo.lock_write()
+ try:
+ repo.start_write_group()
+ try:
+ inv = inventory.Inventory(revision_id="A")
+ inv.root.revision = "A"
+ repo.texts.add_lines((inv.root.file_id, "A"), [], [])
+ rev = _mod_revision.Revision(timestamp=0, timezone=None,
+ committer="Foo Bar <foo at example.com>", message="Message",
+ revision_id="A")
+ rev.parent_ids = ()
+ repo.add_revision("A", rev, inv=inv)
+ except:
+ repo.abort_write_group()
+ raise
+ else:
+ old_names = repo._pack_collection._names.keys()
+ result = repo.commit_write_group()
+ cur_names = repo._pack_collection._names.keys()
+ new_names = list(set(cur_names) - set(old_names))
+ self.assertEqual(new_names, result)
+ finally:
+ repo.unlock()
+
def test_fail_obsolete_deletion(self):
# failing to delete obsolete packs is not fatal
format = self.get_format()