[MERGE] Refactor fetch: add get_data_about_revision_ids to repository, and other changes.

Andrew Bennetts andrew at canonical.com
Fri Aug 3 04:36:52 BST 2007


This bundle is split out from my repo-refactor branch, which is working
towards smart server support for streaming sets of revisions in a single
request, rather than the thousands of requests that can easily occur right
now.

This particular bundle rearranges fetch.py a little.  Most interestingly, it
moves some of the logic out of fetch.py and into a new method on Repository,
get_data_about_revision_ids(revision_ids).
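
Roughly, the new method is a generator that yields (knit-kind, file-id,
versions) tuples in an order that is safe to fetch and insert, so a caller
can drive the whole fetch with a single dispatch loop.  A minimal sketch of
such a caller follows; the copy_* helpers here are hypothetical stand-ins
for the real _fetch_* methods in fetch.py:

    def fetch_everything(from_repository, to_repository, revision_ids):
        data = from_repository.get_data_about_revision_ids(revision_ids)
        for knit_kind, file_id, versions in data:
            if knit_kind == 'file':
                # per-file text versions altered by revision_ids
                copy_file_texts(from_repository, to_repository,
                                file_id, versions)
            elif knit_kind == 'inventory':
                copy_inventories(from_repository, to_repository, versions)
            elif knit_kind == 'signatures':
                # handled together with the revision texts below
                pass
            elif knit_kind == 'revisions':
                copy_revisions(from_repository, to_repository, versions)
            else:
                raise AssertionError('unknown knit kind %r' % (knit_kind,))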

I've gone through some small contortions to keep the progress bar handling
the same as it was before.  I'm pretty sure this will need to change
eventually; it doesn't feel right as it is, as the comments in the code now
say.  I'd love to hear other people's opinions on what to do about this.
Similarly, the inventory_weave caching that used to be done in fetch.py is
now handled, a bit strangely, by get_data_about_revision_ids.  This part
should probably be moved back into fetch.py.
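
If that caching did move back, the fetcher would own the inventory weave's
cache around the iteration.  A sketch of that alternative (not what the
patch does today; get_inventory_weave, enable_cache and clear_cache are the
existing weave calls):

    inv_w = self.from_repository.get_inventory_weave()
    inv_w.enable_cache()
    try:
        for knit_kind, file_id, versions in \
                self.from_repository.get_data_about_revision_ids(revs, pb):
            # dispatch on knit_kind exactly as
            # _fetch_everything_for_revisions does now
            pass
    finally:
        inv_w.clear_cache()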

-Andrew.

-------------- next part --------------
# Bazaar merge directive format 2 (Bazaar 0.19)
# revision_id: andrew.bennetts at canonical.com-20070802080306-\
#   s81tmi8j90hie7qe
# target_branch: http://bazaar-vcs.org/bzr/bzr.dev
# testament_sha1: cd4a40c6b96d7c4e9205c4d18c455f2d44422699
# timestamp: 2007-08-02 18:04:21 +1000
# source_branch: http://people.ubuntu.com/~andrew/bzr/fetch-refactor
# base_revision_id: pqm at pqm.ubuntu.com-20070802072205-gjk1eev6rlw7ght8
# 
# Begin patch
=== modified file 'bzrlib/fetch.py'
--- bzrlib/fetch.py	2007-06-22 22:19:13 +0000
+++ bzrlib/fetch.py	2007-08-02 08:03:06 +0000
@@ -89,10 +89,7 @@
         # result variables.
         self.failed_revisions = []
         self.count_copied = 0
-        if to_repository.control_files._transport.base == from_repository.control_files._transport.base:
-            # check that last_revision is in 'from' and then return a no-operation.
-            if last_revision not in (None, NULL_REVISION):
-                to_repository.get_revision(last_revision)
+        if self._same_repo(to_repository, from_repository, last_revision):
             return
         self.to_repository = to_repository
         self.from_repository = from_repository
@@ -116,6 +113,14 @@
         finally:
             self.from_repository.unlock()
 
+    def _same_repo(self, to_repository, from_repository, last_revision):
+        if to_repository.control_files._transport.base == from_repository.control_files._transport.base:
+            # check that last_revision is in 'from' and then return a no-operation.
+            if last_revision not in (None, NULL_REVISION):
+                to_repository.get_revision(last_revision)
+            return True
+        return False
+
     def __fetch(self):
         """Primary worker function.
 
@@ -132,19 +137,63 @@
         try:
             pp.next_phase()
             revs = self._revids_to_fetch()
-            # something to do ?
-            if revs:
-                pp.next_phase()
-                self._fetch_weave_texts(revs)
-                pp.next_phase()
-                self._fetch_inventory_weave(revs)
-                pp.next_phase()
-                self._fetch_revision_texts(revs)
-                self.count_copied += len(revs)
+            self._fetch_everything_for_revisions(revs, pp)
         finally:
             self.pb.clear()
 
+    def _fetch_everything_for_revisions(self, revs, pp):
+        """Fetch all data for the given set of revisions."""
+        if revs is None:
+            return
+        # The first phase is "file".  We pass the progress bar for it directly
+        # into get_data_about_revision_ids, which has more information about how
+        # that phase is progressing than we do.  Progress updates for the other
+        # phases are taken care of in this function.
+        # XXX: there should be a clear owner of the progress reporting.  Perhaps
+        # get_data_about_revision_ids should have a richer API than it does at
+        # the moment, so that it can feed the progress information back to this
+        # function?
+        phase = 'file'
+        pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        try:
+            what_to_do = self.from_repository.get_data_about_revision_ids(revs, pb)
+            for knit_kind, file_id, revisions in what_to_do:
+                if knit_kind != phase:
+                    phase = knit_kind
+                    # Make a new progress bar for this phase
+                    pb.finished()
+                    pp.next_phase()
+                    pb = bzrlib.ui.ui_factory.nested_progress_bar()
+                if knit_kind == "file":
+                    self._fetch_weave_text(file_id, revisions)
+                elif knit_kind == "inventory":
+                    # XXX:
+                    # Once we've processed all the files, then we generate the root
+                    # texts (if necessary), then we process the inventory.  It's a
+                    # bit distasteful to have knit_kind == "inventory" mean this,
+                    # perhaps it should happen on the first non-"file" knit, in case
+                    # it's not always inventory?
+                    self._generate_root_texts(revs)
+                    self._fetch_inventory_weave(revs, pb)
+                elif knit_kind == "signatures":
+                    # Nothing to do here; this will be taken care of when
+                    # _fetch_revision_texts happens.
+                    pass
+                elif knit_kind == "revisions":
+                    self._fetch_revision_texts(revs, pb)
+                else:
+                    raise AssertionError("Unknown knit kind %r" % knit_kind)
+        finally:
+            if pb is not None:
+                pb.finished()
+        self.count_copied += len(revs)
+        
     def _revids_to_fetch(self):
+        """Determines the exact revisions needed from self.from_repository to
+        install self._last_revision in self.to_repository.
+
+        If no revisions need to be fetched, then this just returns None.
+        """
         mutter('fetch up to rev {%s}', self._last_revision)
         if self._last_revision is NULL_REVISION:
             # explicit limit of no revisions needed
@@ -159,65 +208,55 @@
         except errors.NoSuchRevision:
             raise InstallFailed([self._last_revision])
 
-    def _fetch_weave_texts(self, revs):
-        texts_pb = bzrlib.ui.ui_factory.nested_progress_bar()
-        try:
-            # fileids_altered_by_revision_ids requires reading the inventory
-            # weave, we will need to read the inventory weave again when
-            # all this is done, so enable caching for that specific weave
-            inv_w = self.from_repository.get_inventory_weave()
-            inv_w.enable_cache()
-            file_ids = self.from_repository.fileids_altered_by_revision_ids(revs)
-            count = 0
-            num_file_ids = len(file_ids)
-            for file_id, required_versions in file_ids.items():
-                texts_pb.update("fetch texts", count, num_file_ids)
-                count +=1
-                to_weave = self.to_weaves.get_weave_or_empty(file_id,
-                    self.to_repository.get_transaction())
-                from_weave = self.from_weaves.get_weave(file_id,
-                    self.from_repository.get_transaction())
-                # we fetch all the texts, because texts do
-                # not reference anything, and its cheap enough
-                to_weave.join(from_weave, version_ids=required_versions)
-                # we don't need *all* of this data anymore, but we dont know
-                # what we do. This cache clearing will result in a new read 
-                # of the knit data when we do the checkout, but probably we
-                # want to emit the needed data on the fly rather than at the
-                # end anyhow.
-                # the from weave should know not to cache data being joined,
-                # but its ok to ask it to clear.
-                from_weave.clear_cache()
-                to_weave.clear_cache()
-        finally:
-            texts_pb.finished()
-
-    def _fetch_inventory_weave(self, revs):
-        pb = bzrlib.ui.ui_factory.nested_progress_bar()
-        try:
-            pb.update("fetch inventory", 0, 2)
-            to_weave = self.to_control.get_weave('inventory',
-                    self.to_repository.get_transaction())
-    
-            child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
-            try:
-                # just merge, this is optimisable and its means we don't
-                # copy unreferenced data such as not-needed inventories.
-                pb.update("fetch inventory", 1, 3)
-                from_weave = self.from_repository.get_inventory_weave()
-                pb.update("fetch inventory", 2, 3)
-                # we fetch only the referenced inventories because we do not
-                # know for unselected inventories whether all their required
-                # texts are present in the other repository - it could be
-                # corrupt.
-                to_weave.join(from_weave, pb=child_pb, msg='merge inventory',
-                              version_ids=revs)
-                from_weave.clear_cache()
-            finally:
-                child_pb.finished()
-        finally:
-            pb.finished()
-
+    def _fetch_weave_text(self, file_id, required_versions):
+        to_weave = self.to_weaves.get_weave_or_empty(file_id,
+            self.to_repository.get_transaction())
+        from_weave = self.from_weaves.get_weave(file_id,
+            self.from_repository.get_transaction())
+        # we fetch all the texts, because texts do
+        # not reference anything, and its cheap enough
+        to_weave.join(from_weave, version_ids=required_versions)
+        # we don't need *all* of this data anymore, but we dont know
+        # what we do. This cache clearing will result in a new read 
+        # of the knit data when we do the checkout, but probably we
+        # want to emit the needed data on the fly rather than at the
+        # end anyhow.
+        # the from weave should know not to cache data being joined,
+        # but its ok to ask it to clear.
+        from_weave.clear_cache()
+        to_weave.clear_cache()
+
+    def _fetch_inventory_weave(self, revs, pb):
+        pb.update("fetch inventory", 0, 2)
+        to_weave = self.to_control.get_weave('inventory',
+                self.to_repository.get_transaction())
+
+        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        try:
+            # just merge, this is optimisable and its means we don't
+            # copy unreferenced data such as not-needed inventories.
+            pb.update("fetch inventory", 1, 3)
+            from_weave = self.from_repository.get_inventory_weave()
+            pb.update("fetch inventory", 2, 3)
+            # we fetch only the referenced inventories because we do not
+            # know for unselected inventories whether all their required
+            # texts are present in the other repository - it could be
+            # corrupt.
+            to_weave.join(from_weave, pb=child_pb, msg='merge inventory',
+                          version_ids=revs)
+            from_weave.clear_cache()
+        finally:
+            child_pb.finished()
+
+    def _generate_root_texts(self, revs):
+        """This will be called by __fetch between fetching weave texts and
+        fetching the inventory weave.
+
+        Subclasses should override this if they need to generate root texts
+        after fetching weave texts.
+        """
+        pass
+        
 
 class GenericRepoFetcher(RepoFetcher):
     """This is a generic repo to repo fetcher.
@@ -226,37 +265,29 @@
     It triggers a reconciliation after fetching to ensure integrity.
     """
 
-    def _fetch_revision_texts(self, revs):
+    def _fetch_revision_texts(self, revs, pb):
         """Fetch revision object texts"""
-        rev_pb = bzrlib.ui.ui_factory.nested_progress_bar()
-        try:
-            to_txn = self.to_transaction = self.to_repository.get_transaction()
-            count = 0
-            total = len(revs)
-            to_store = self.to_repository._revision_store
-            for rev in revs:
-                pb = bzrlib.ui.ui_factory.nested_progress_bar()
-                try:
-                    pb.update('copying revisions', count, total)
-                    try:
-                        sig_text = self.from_repository.get_signature_text(rev)
-                        to_store.add_revision_signature_text(rev, sig_text, to_txn)
-                    except errors.NoSuchRevision:
-                        # not signed.
-                        pass
-                    to_store.add_revision(self.from_repository.get_revision(rev),
-                                          to_txn)
-                    count += 1
-                finally:
-                    pb.finished()
-            # fixup inventory if needed: 
-            # this is expensive because we have no inverse index to current ghosts.
-            # but on local disk its a few seconds and sftp push is already insane.
-            # so we just-do-it.
-            # FIXME: repository should inform if this is needed.
-            self.to_repository.reconcile()
-        finally:
-            rev_pb.finished()
+        to_txn = self.to_transaction = self.to_repository.get_transaction()
+        count = 0
+        total = len(revs)
+        to_store = self.to_repository._revision_store
+        for rev in revs:
+            pb.update('copying revisions', count, total)
+            try:
+                sig_text = self.from_repository.get_signature_text(rev)
+                to_store.add_revision_signature_text(rev, sig_text, to_txn)
+            except errors.NoSuchRevision:
+                # not signed.
+                pass
+            to_store.add_revision(self.from_repository.get_revision(rev),
+                                  to_txn)
+            count += 1
+        # fixup inventory if needed: 
+        # this is expensive because we have no inverse index to current ghosts.
+        # but on local disk its a few seconds and sftp push is already insane.
+        # so we just-do-it.
+        # FIXME: repository should inform if this is needed.
+        self.to_repository.reconcile()
     
 
 class KnitRepoFetcher(RepoFetcher):
@@ -267,7 +298,7 @@
     copy revision texts.
     """
 
-    def _fetch_revision_texts(self, revs):
+    def _fetch_revision_texts(self, revs, pb):
         # may need to be a InterRevisionStore call here.
         from_transaction = self.from_repository.get_transaction()
         to_transaction = self.to_repository.get_transaction()
@@ -357,12 +388,10 @@
         GenericRepoFetcher.__init__(self, to_repository, from_repository,
                                     last_revision, pb)
 
-    def _fetch_weave_texts(self, revs):
-        GenericRepoFetcher._fetch_weave_texts(self, revs)
-        # Now generate a weave for the tree root
+    def _generate_root_texts(self, revs):
         self.helper.generate_root_texts(revs)
 
-    def _fetch_inventory_weave(self, revs):
+    def _fetch_inventory_weave(self, revs, pb):
         self.helper.regenerate_inventory(revs)
  
 
@@ -375,12 +404,10 @@
         KnitRepoFetcher.__init__(self, to_repository, from_repository,
                                  last_revision, pb)
 
-    def _fetch_weave_texts(self, revs):
-        KnitRepoFetcher._fetch_weave_texts(self, revs)
-        # Now generate a weave for the tree root
+    def _generate_root_texts(self, revs):
         self.helper.generate_root_texts(revs)
 
-    def _fetch_inventory_weave(self, revs):
+    def _fetch_inventory_weave(self, revs, pb):
         self.helper.regenerate_inventory(revs)
         
 

=== modified file 'bzrlib/repository.py'
--- bzrlib/repository.py	2007-07-31 02:07:34 +0000
+++ bzrlib/repository.py	2007-08-02 08:03:06 +0000
@@ -624,6 +624,54 @@
             pb.finished()
         return result
 
+    def get_data_about_revision_ids(self, revision_ids, files_pb=None):
+        """Get an iterable about data for a given set of revision IDs.
+
+        The named data will be ordered so that it can be fetched and inserted in
+        that order safely.
+        
+        :returns: (knit-kind, file-id, versions)
+        """
+        # XXX: it's a bit weird to control the inventory weave caching in this
+        # generator.  Ideally the caching would be done in fetch.py I think.  Or
+        # maybe this generator should explicitly have the contract that it
+        # should not be iterated until the previously yielded item has been
+        # processed?
+        inv_w = self.get_inventory_weave()
+        inv_w.enable_cache()
+
+        # file ids that changed
+        file_ids = self.fileids_altered_by_revision_ids(revision_ids)
+        count = 0
+        num_file_ids = len(file_ids)
+        for file_id, altered_versions in file_ids.iteritems():
+            if files_pb is not None:
+                files_pb.update("fetch texts", count, num_file_ids)
+            count += 1
+            yield ("file", file_id, altered_versions)
+        # We're done with the files_pb.  Note that it finished by the caller,
+        # just as it was created by the caller.
+        del files_pb
+
+        # inventory
+        yield ("inventory", None, revision_ids)
+        inv_w.clear_cache()
+
+        # signatures
+        revisions_with_signatures = set()
+        for rev_id in revision_ids:
+            try:
+                self.get_signature_text(rev_id)
+            except errors.NoSuchRevision:
+                # not signed.
+                pass
+            else:
+                revisions_with_signatures.add(rev_id)
+        yield ("signatures", None, revisions_with_signatures)
+
+        # revisions
+        yield ("revisions", None, revision_ids)
+
     @needs_read_lock
     def get_inventory_weave(self):
         return self.control_weaves.get_weave('inventory',

=== modified file 'bzrlib/tests/repository_implementations/test_repository.py'
--- bzrlib/tests/repository_implementations/test_repository.py	2007-07-25 00:52:21 +0000
+++ bzrlib/tests/repository_implementations/test_repository.py	2007-08-02 08:03:06 +0000
@@ -210,6 +210,21 @@
         rev2_tree = knit3_repo.revision_tree('rev2')
         self.assertEqual('rev1', rev2_tree.inventory.root.revision)
 
+    def makeARepoWithSignatures(self):
+        wt = self.make_branch_and_tree('a-repo-with-sigs')
+        wt.commit('rev1', allow_pointless=True, rev_id='rev1')
+        repo = wt.branch.repository
+        repo.sign_revision('rev1', bzrlib.gpg.LoopbackGPGStrategy(None))
+        return repo
+
+    def test_fetch_copies_signatures(self):
+        source_repo = self.makeARepoWithSignatures()
+        target_repo = self.make_repository('target')
+        target_repo.fetch(source_repo, revision_id=None)
+        self.assertEqual(
+            source_repo.get_signature_text('rev1'),
+            target_repo.get_signature_text('rev1'))
+
     def test_get_revision_delta(self):
         tree_a = self.make_branch_and_tree('a')
         self.build_tree(['a/foo'])


