Rev 4734: change tack. Always try to obsolete our current list. in http://bazaar.launchpad.net/~jameinel/bzr/2.0.4-autopack-rename-507557
John Arbash Meinel
john at arbash-meinel.com
Thu Jan 21 19:24:49 GMT 2010
At http://bazaar.launchpad.net/~jameinel/bzr/2.0.4-autopack-rename-507557
------------------------------------------------------------
revno: 4734
revision-id: john at arbash-meinel.com-20100121192426-uqybtanmedljv60o
parent: john at arbash-meinel.com-20100121191756-f9q5w6pw995w7drr
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: 2.0.4-autopack-rename-507557
timestamp: Thu 2010-01-21 13:24:26 -0600
message:
change tack. Always try to obsolete our current list.
If I could have found a clean way to track the 'new-pack' that triggered
the autopack (which in turn triggers the obsoletion), then we could simply
have added that pack to the set of packs allowed to be obsoleted. But the
layering does not make that easy.
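
To make the before/after behaviour concrete, here is a minimal, self-contained
Python sketch of the filtering change in _save_pack_names. FakePack and the two
filter functions are illustrative stand-ins, not the real bzrlib objects:

# Illustrative sketch only; FakePack stands in for bzrlib's pack objects.
class FakePack(object):
    def __init__(self, name):
        self.name = name

def filter_obsolete_old(obsolete_packs, already_obsolete, orig_disk_names):
    # Old behaviour: only obsolete packs that were listed in the pack-names
    # file we originally read (freshly written packs were special-cased via
    # isinstance(o, NewPack) in the real code).
    return [o for o in obsolete_packs
            if o.name not in already_obsolete
            and o.name in orig_disk_names]

def filter_obsolete_new(obsolete_packs, already_obsolete):
    # New behaviour: always try to obsolete whatever we were handed, skipping
    # only packs that some other process has already obsoleted.
    return [o for o in obsolete_packs if o.name not in already_obsolete]

packs = [FakePack('fresh'), FakePack('stale')]
print([p.name for p in filter_obsolete_old(
    packs, already_obsolete=set(), orig_disk_names=set(['stale']))])
# -> ['stale']
print([p.name for p in filter_obsolete_new(packs, already_obsolete=set())])
# -> ['fresh', 'stale']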
-------------- next part --------------
=== modified file 'bzrlib/repofmt/pack_repo.py'
--- a/bzrlib/repofmt/pack_repo.py 2010-01-21 19:17:56 +0000
+++ b/bzrlib/repofmt/pack_repo.py 2010-01-21 19:24:26 +0000
@@ -1985,10 +1985,13 @@
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if obsolete_packs:
-            orig_disk_names = set([x[0][0] for x in orig_disk_nodes])
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
             obsolete_packs = [o for o in obsolete_packs
-                              if o.name not in already_obsolete
-                              and (o.name in orig_disk_names or isinstance(o, NewPack))]
+                              if o.name not in already_obsolete]
             self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]
=== modified file 'bzrlib/tests/test_repository.py'
--- a/bzrlib/tests/test_repository.py 2010-01-21 19:17:56 +0000
+++ b/bzrlib/tests/test_repository.py 2010-01-21 19:24:26 +0000
@@ -1375,7 +1375,6 @@
('bogus-rev',), (), None, 'bogus-content\n')])
# This should trigger an autopack, which will combine everything into a
# single pack file.
- import pdb; pdb.set_trace()
new_names = r.commit_write_group()
names = packs.names()
self.assertEqual(1, len(names))
@@ -1430,26 +1429,6 @@
         obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
         self.assertEqual([pack.name], sorted(obsolete_names))
 
-    def test__save_pack_names_obsolete_already_marked(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        # Queue up an entry to be deleted
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        # Simulate a concurrent operation by removing 'pack.name' from the
-        # pack-names file.
-        builder = packs._index_builder_class()
-        for key, value in packs._diff_pack_names()[0]:
-            builder.add_node(key, value)
-        packs.transport.put_file('pack-names', builder.finish())
-        packs._save_pack_names(obsolete_packs=[pack])
-        # We should not try to obsolete the given pack file in this process,
-        # because another process already removed it from the pack-names file.
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names], sorted(cur_packs))
-        self.assertEqual([], packs.transport.list_dir('obsolete_packs'))
-
 
 class TestPack(TestCaseWithTransport):
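
For completeness, the extra condition mentioned in the TODO comment in the
pack_repo.py hunk above might have looked roughly like the sketch below. This
is hypothetical code only: the new_pack object is not actually in scope in
_save_pack_names, which is exactly why the simpler filter was kept.

import collections

# Hypothetical sketch of the TODO condition; FakePack is an illustrative
# stand-in, and 'new_pack' would have had to be threaded through the
# autopacking code to be available in the real method.
FakePack = collections.namedtuple('FakePack', 'name')

def filter_obsolete_with_todo(obsolete_packs, already_obsolete,
                              orig_disk_names, new_pack):
    return [o for o in obsolete_packs
            if o.name not in already_obsolete
            and (o.name in orig_disk_names or o is new_pack)]

new_pack = FakePack('fresh')
print([p.name for p in filter_obsolete_with_todo(
    [new_pack, FakePack('stale')], already_obsolete=set(),
    orig_disk_names=set(['stale']), new_pack=new_pack)])
# -> ['fresh', 'stale']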