Rev 24: pass a Logger to the FileLockingInfo structure. in lp:~jameinel/+junk/file_locking

John Arbash Meinel john at arbash-meinel.com
Mon Sep 21 21:56:53 BST 2009


At lp:~jameinel/+junk/file_locking

------------------------------------------------------------
revno: 24
revision-id: john at arbash-meinel.com-20090921205645-90miwm287elvhgvg
parent: john at arbash-meinel.com-20090921202534-ayf0xttklwoo5jcv
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: file_locking
timestamp: Mon 2009-09-21 15:56:45 -0500
message:
  pass a Logger to the FileLockingInfo structure.
  
  This ensures the log gets updated when actions occur.
  Changes are now staged in a memory buffer until we flush the log,
  so we just need to call flush before we upload the new info structure.
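
A rough sketch of the intended call order follows (illustrative only; the
transport setup and the 'file-id-1' id are assumptions for the example, not
part of this commit):

    # The logger buffers its stanzas in memory; nothing is written yet.
    logger = FileLockLogger(transport, 'log')
    locking_info = FileLockingInfo(['file-id-1'], {}, logger)

    # create_lock()/remove_lock() record their actions via the logger.
    info = locking_info.create_lock('file-id-1')

    # Flush the buffered log entries before uploading the new info
    # structure, which is the ordering this revision aims for.
    logger.flush()
    bytes = FileLockingInfoSerializer().to_bytes(locking_info)
    transport.put_bytes_non_atomic('info', bytes)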
-------------- next part --------------
=== modified file 'file_lock.py'
--- a/file_lock.py	2009-09-21 20:25:34 +0000
+++ b/file_lock.py	2009-09-21 20:56:45 +0000
@@ -83,7 +83,7 @@
 
     def __init__(self, wt):
         """Track what locks we currently hold for the given tree.
-        
+
         :param wt: A WorkingTree
         """
         self._wt = wt
@@ -114,7 +114,7 @@
 
     def remove_lock_id(self, lock_id):
         """Claim that we are no longer holding a lock id.
-        
+
         This will raise an error if that lock is not claimed to be currently
         held.
         """
@@ -137,7 +137,7 @@
 
     _HEADER = '# bzr-file-locking locking info v1'
 
-    def from_bytes(self, bytes):
+    def from_bytes(self, bytes, logger=None):
         expected_header = self._HEADER + '\n'
         if not bytes.startswith(expected_header):
             raise ValueError('Content has invalid header.\n'
@@ -151,7 +151,7 @@
             tracked_ids = []
         else:
             tracked_ids = stanza.get_all('t')
-        return FileLockingInfo(tracked_ids, [])
+        return FileLockingInfo(tracked_ids, [], logger)
 
     def to_bytes(self, locking_info):
         content = [self._HEADER, '\n']
@@ -175,14 +175,18 @@
 
 class FileLockingInfo(object):
     """Track information about file locks for this tree.
-    
+
     :ivar _lock_store_url: The location where actual locks are tracked
     :ivar _tracked_ids: The actual locations where content is tracked
     """
 
-    def __init__(self, tracked_ids, active_locks):
+    def __init__(self, tracked_ids, active_locks, logger=None):
         self._tracked_ids = set(tracked_ids)
         self._active_locks = active_locks
+        if logger is None:
+            self._logger = _null_logger
+        else:
+            self._logger = logger
 
     def create_lock(self, file_id):
         # Mostly the same info that is generated for bzr LockDir info files
@@ -192,19 +196,21 @@
             raise KeyError('{%s} is not tracked' % (file_id,))
         global_conf = _mod_config.GlobalConfig()
         nonce = osutils.rand_chars(20)
-        s = rio.Stanza(host=osutils.get_host_name(),
+        s = rio.Stanza(file_id=file_id,
+                   host=osutils.get_host_name(),
+                   nonce=nonce,
                    pid=str(os.getpid()),
                    time=str(int(time.time())),
-                   nonce=nonce,
                    user=global_conf.username(),
-                   file_id=file_id,
                    )
+        self._logger.lock_created(s)
         self._active_locks[file_id] = s
         return s
 
     def remove_lock(self, file_id):
         """Remove the given lock from the 'active' set."""
-        del self._active_locks[file_id]
+        info = self._active_locks.pop(file_id)
+        self._logger.lock_removed(info)
 
     def track(self, file_id):
         """Include this file-id in the set of objects that can be locked."""
@@ -225,6 +231,7 @@
         """Create a new Logger."""
         self._transport = transport
         self._filename = filename
+        self._pending = []
 
     @classmethod
     def initialize(cls, transport, filename):
@@ -240,7 +247,61 @@
         stanza.add('host', osutils.get_host_name())
         transport.put_bytes_non_atomic('log', stanza.to_string())
         return cls(transport, filename=filename)
-        
+
+    def _log_stanza(self, stanza):
+        """Append the stanza to the log file."""
+        self._pending.append('\n')
+        self._pending.extend(stanza.to_lines())
+
+    def flush(self):
+        if self._pending:
+            pending = self._pending
+            self._pending = []
+            self._transport.append_bytes(self._filename, ''.join(pending))
+
+    def lock_created(self, info):
+        """A lock was created, log this info."""
+        s = rio.Stanza(action='lock created')
+        s.add('date', osutils.format_date(int(info['time']),
+            offset=osutils.local_time_offset()))
+        for key, value in info.iter_pairs():
+            s.add(key, value)
+        self._log_stanza(s)
+
+    def lock_removed(self, info):
+        global_conf = _mod_config.GlobalConfig()
+        s = rio.Stanza(action='lock removed')
+        t = int(time.time())
+        s.add('time', str(t))
+        s.add('date', osutils.format_date(t,
+            offset=osutils.local_time_offset()))
+        s.add('user', global_conf.username())
+        s.add('host', osutils.get_host_name())
+        s.add('file_id', info['file_id'])
+        s.add('nonce', info['nonce'])
+        self._log_stanza(s)
+
+
+class NullFileLockLogger(FileLockLogger):
+    """For testing purposes only.
+
+    Skips any actual writing to disk.
+    """
+
+    def __init__(self):
+        """Init is overridden since we don't need transport, etc."""
+        super(NullFileLockLogger, self).__init__(transport=None,
+                                                 filename='<null>')
+
+    def _log_stanza(self, stanza):
+        pass
+
+    def flush(self):
+        if self._pending:
+            self._pending = []
+
+_null_logger = NullFileLockLogger()
+
 
 class FileLockStore(object):
     """This is the actual location where file locks are tracked.
@@ -277,14 +338,14 @@
             transport.put_bytes('README.txt',
                 'This is a Bazaar File Locking Lock Store\n'
                 'You should not need to modify any files in this directory.\n')
-            locking_info = FileLockingInfo([], [])
-            s = FileLockingInfoSerializer()
-            bytes = s.to_bytes(locking_info)
+            logger = FileLockLogger.initialize(transport, 'log')
+            locking_info = FileLockingInfo([], [], logger)
             # Instead of having a separate 'format' file, we put the data into
             # the 'info' file. Which means that we have 1 less object to read
             # from remote.
+            s = FileLockingInfoSerializer()
+            bytes = s.to_bytes(locking_info)
             transport.put_bytes_non_atomic('info', bytes)
-            logger = FileLockLogger.initialize(transport, 'log')
             store = cls(transport, locking_info, lock, logger)
         except:
             lock.unlock()
@@ -297,10 +358,10 @@
         # TODO: This will raise NoSuchFile if the target doesn't exist.
         #       Create a Custom Exception
         info_bytes = transport.get_bytes('info')
+        lock = lockdir.LockDir(transport, 'lock')
+        logger = FileLockLogger(transport, 'log')
         s = FileLockingInfoSerializer()
-        locking_info = s.from_bytes(info_bytes)
-        lock = lockdir.LockDir(transport, 'lock')
-        logger = FileLockLogger(transport, 'log')
+        locking_info = s.from_bytes(info_bytes, logger)
         return cls(transport, locking_info, lock, logger)
 
     def unlock(self):
@@ -344,7 +405,7 @@
 
     def is_locked(self, path):
         """Is the given path already locked?
-        
+
         Note that if the given path isn't marked as tracked, we don't even
         check to see if there is a lock.
         """

=== modified file 'tests/test_file_lock.py'
--- a/tests/test_file_lock.py	2009-09-21 20:25:34 +0000
+++ b/tests/test_file_lock.py	2009-09-21 20:56:45 +0000
@@ -346,3 +346,54 @@
         self.assertIsInstance(fls, file_lock.FileLockStore)
         self.assertEqual(trans, fls._transport)
         self.assertIsInstance(fls._locking_info, file_lock.FileLockingInfo)
+
+
+class TestFileLockLogger(tests.TestCaseWithMemoryTransport):
+
+    def make_logger(self, path='log'):
+        trans = self.get_transport('')
+        return file_lock.FileLockLogger.initialize(trans, path)
+
+    def test_init(self):
+        trans = self.get_transport('')
+        logger = self.make_logger()
+        self.assertContainsRe(trans.get_bytes('log'),
+            'action: Lock Store created\n'
+            'time: \\d+\n'
+            'date: .*\n'
+            'user: .*\n'
+            'host: .*\n', flags=re.MULTILINE)
+
+    def test_lock_created(self):
+        logger = self.make_logger()
+        li = file_lock.FileLockingInfo(['file-id-1', 'file-id-2'], {}, logger)
+        info = li.create_lock('file-id-1')
+        logger.flush()
+        self.assertContainsRe(logger._transport.get_bytes(logger._filename),
+            '\n'# end of previous action
+            '\n'# extra separator line
+            'action: lock created\n'
+            'date: .*\n'
+            'file_id: file-id-1\n'
+            'host: .*\n'
+            'nonce: .*\n'
+            '.*\n', flags=re.MULTILINE)
+
+    def test_lock_removed(self):
+        logger = self.make_logger()
+        li = file_lock.FileLockingInfo(['file-id-1', 'file-id-2'], {},
+                                       logger)
+        info = li.create_lock('file-id-1')
+        li.remove_lock('file-id-1')
+        logger.flush()
+        self.assertContainsRe(logger._transport.get_bytes(logger._filename),
+            '\n'# end of previous action
+            '\n'# extra separator line
+            'action: lock removed\n'
+            'time: \\d+\n'
+            'date: .*\n'
+            'user: .*\n' #User that unlocked it
+            'host: .*\n'
+            'file_id: .*\n'
+            'nonce: .*\n',
+            flags=re.MULTILINE)


