[MERGE] Add an optional 'token' keyword argument to LockableFiles.lock_write

Andrew Bennetts andrew at canonical.com
Mon Feb 12 05:02:26 GMT 2007


Andrew Bennetts wrote:
> This adds the low-level support for the branch and repository lock sharing that is
[...]

Ahem.  And here is the bundle.
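
To make the intended use a bit more concrete, here is a rough sketch of how
two LockableFiles instances could share a write lock via the token.  This is
only illustrative and not part of the bundle; the local transport and the
'my-lock' name are made up to mirror what the new tests do:

    from bzrlib.lockable_files import LockableFiles
    from bzrlib.lockdir import LockDir
    from bzrlib.transport import get_transport

    transport = get_transport('.')

    # The first instance physically acquires the write lock; lock_write()
    # now returns a token (None when the lock strategy, e.g. TransportLock,
    # doesn't support tokens).
    files_a = LockableFiles(transport, 'my-lock', LockDir)
    files_a.create_lock()
    token = files_a.lock_write()

    # A second instance for the same location can join the lock by passing
    # that token.  A mismatched token raises TokenMismatch, and passing a
    # token to a strategy without token support raises
    # TokenLockingNotSupported.
    files_b = LockableFiles(transport, 'my-lock', LockDir)
    files_b.lock_write(token=token)

    # Unlocking the token-holding instance does not physically release the
    # lock on disk; only the instance that originally acquired it does.
    files_b.unlock()
    files_a.unlock()

The point is that the second instance never touches the lock on disk; it
just records that it holds the lock by virtue of the token.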

-Andrew.

-------------- next part --------------
# Bazaar revision bundle v0.8
#
# message:
#   ``LockableFiles.lock_write()`` now accepts a ``token`` keyword argument, so that
#   a separate LockableFiles instance can share a lock if it has the right token.
#   (Andrew Bennetts, Robert Collins)
#   
# committer: Andrew Bennetts <andrew.bennetts at canonical.com>
# date: Mon 2007-02-12 15:49:32.362999916 +1100

=== modified file NEWS
--- NEWS
+++ NEWS
@@ -77,6 +77,10 @@
       you pass a Unicode string rather than an 8-bit string. Callers need
       to be updated to encode first. (John Arbash Meinel)
 
+    * ``LockableFiles.lock_write()`` now accepts a ``token`` keyword argument,
+      so that a separate LockableFiles instance can share a lock if it has the
+      right token.  (Andrew Bennetts, Robert Collins)
+
   BUGFIXES:
 
     * ``bzr annotate`` now uses dotted revnos from the viewpoint of the

=== modified file bzrlib/errors.py
--- bzrlib/errors.py
+++ bzrlib/errors.py
@@ -744,6 +744,27 @@
         self.lock = lock
 
 
+class TokenLockingNotSupported(LockError):
+
+    _fmt = "The object %(obj)s does not support token specifying a token when locking."
+
+    internal_error = True
+
+    def __init__(self, obj):
+        self.obj = obj
+
+
+class TokenMismatch(LockError):
+
+    _fmt = "The lock token %(given_token)r does not match lock token %(lock_token)r."
+
+    internal_error = True
+
+    def __init__(self, given_token, lock_token):
+        self.given_token = given_token
+        self.lock_token = lock_token
+
+
 class PointlessCommit(BzrError):
 
     _fmt = "No changes to commit"

=== modified file bzrlib/lockable_files.py
--- bzrlib/lockable_files.py
+++ bzrlib/lockable_files.py
@@ -212,21 +212,33 @@
             raise errors.BzrBadParameterNotString(a_string)
         self.put(path, StringIO(a_string.encode('utf-8')))
 
-    def lock_write(self):
+    def lock_write(self, token=None):
+        """Lock this group of files for writing.
+        
+        :param token: if this is already locked, then lock_write will fail
+            unless the token matches the existing lock.
+        :returns: a token if this instance supports tokens, otherwise None.
+        :raises TokenLockingNotSupported: when a token is given but this
+            instance doesn't support using token locks.
+        :raises TokenMismatch: if the specified token doesn't match the token
+            of the existing lock.
+        """
         # mutter("lock write: %s (%s)", self, self._lock_count)
         # TODO: Upgrade locking to support using a Transport,
         # and potentially a remote locking protocol
         if self._lock_mode:
             if self._lock_mode != 'w' or not self.get_transaction().writeable():
                 raise errors.ReadOnlyError(self)
+            self._lock.validate_token(token)
             self._lock_count += 1
         else:
-            self._lock.lock_write()
+            token_from_lock = self._lock.lock_write(token=token)
             #note('write locking %s', self)
             #traceback.print_stack()
             self._lock_mode = 'w'
             self._lock_count = 1
             self._set_transaction(transactions.WriteTransaction())
+            return token_from_lock
 
     def lock_read(self):
         # mutter("lock read: %s (%s)", self, self._lock_count)
@@ -323,7 +335,9 @@
     def break_lock(self):
         raise NotImplementedError(self.break_lock)
 
-    def lock_write(self):
+    def lock_write(self, token=None):
+        if token is not None:
+            raise errors.TokenLockingNotSupported(self)
         self._lock = self._transport.lock_write(self._escaped_name)
 
     def lock_read(self):
@@ -341,3 +355,8 @@
         # for old-style locks, create the file now
         self._transport.put_bytes(self._escaped_name, '',
                             mode=self._file_modebits)
+
+    def validate_token(self, token):
+        if token is not None:
+            raise errors.TokenLockingNotSupported(self)
+

=== modified file bzrlib/lockdir.py
--- bzrlib/lockdir.py
+++ bzrlib/lockdir.py
@@ -160,6 +160,7 @@
         self.transport = transport
         self.path = path
         self._lock_held = False
+        self._locked_via_token = False
         self._fake_read_lock = False
         self._held_dir = path + '/held'
         self._held_info_path = self._held_dir + self.__INFO_NAME
@@ -234,15 +235,19 @@
             return
         if not self._lock_held:
             raise LockNotHeld(self)
-        # rename before deleting, because we can't atomically remove the whole
-        # tree
-        tmpname = '%s/releasing.%s.tmp' % (self.path, rand_chars(20))
-        # gotta own it to unlock
-        self.confirm()
-        self.transport.rename(self._held_dir, tmpname)
-        self._lock_held = False
-        self.transport.delete(tmpname + self.__INFO_NAME)
-        self.transport.rmdir(tmpname)
+        if self._locked_via_token:
+            self._locked_via_token = False
+            self._lock_held = False
+        else:
+            # rename before deleting, because we can't atomically remove the
+            # whole tree
+            tmpname = '%s/releasing.%s.tmp' % (self.path, rand_chars(20))
+            # gotta own it to unlock
+            self.confirm()
+            self.transport.rename(self._held_dir, tmpname)
+            self._lock_held = False
+            self.transport.delete(tmpname + self.__INFO_NAME)
+            self.transport.rmdir(tmpname)
 
     def break_lock(self):
         """Break a lock not held by this instance of LockDir.
@@ -415,9 +420,26 @@
             else:
                 raise LockContention(self)
 
-    def lock_write(self):
-        """Wait for and acquire the lock."""
-        self.wait_lock()
+    def lock_write(self, token=None):
+        """Wait for and acquire the lock.
+        
+        :param token: if this is already locked, then lock_write will fail
+            unless the token matches the existing lock.
+        :returns: a token if this instance supports tokens, otherwise None.
+        :raises TokenLockingNotSupported: when a token is given but this
+            instance doesn't support using token locks.
+        :raises TokenMismatch: if the specified token doesn't match the token
+            of the existing lock.
+         
+        XXX: docstring duplicated from LockableFiles.lock_write.
+        """
+        if token is not None:
+            self.validate_token(token)
+            self._lock_held = True
+            self._locked_via_token = True
+        else:
+            self.wait_lock()
+            return self.peek().get('nonce')
 
     def lock_read(self):
         """Compatibility-mode shared lock.
@@ -457,3 +479,14 @@
             'locked %s' % (format_delta(delta),),
             ]
 
+    def validate_token(self, token):
+        if token is not None:
+            info = self.peek()
+            if info is None:
+                # Lock isn't held
+                lock_token = None
+            else:
+                lock_token = info.get('nonce')
+            if token != lock_token:
+                raise errors.TokenMismatch(token, lock_token)
+

=== modified file bzrlib/tests/test_lockable_files.py
--- bzrlib/tests/test_lockable_files.py
+++ bzrlib/tests/test_lockable_files.py
@@ -21,6 +21,7 @@
 import bzrlib.errors as errors
 from bzrlib.errors import BzrBadParameterNotString, NoSuchFile, ReadOnlyError
 from bzrlib.lockable_files import LockableFiles, TransportLock
+from bzrlib import lockdir
 from bzrlib.lockdir import LockDir
 from bzrlib.tests import TestCaseInTempDir
 from bzrlib.tests.test_transactions import DummyWeave
@@ -34,6 +35,15 @@
 # these tests are applied in each parameterized suite for LockableFiles
 class _TestLockableFiles_mixin(object):
 
+    def setUp(self):
+        # Reduce the default timeout, so that if tests fail, they will do so
+        # reasonably quickly.
+        orig_timeout = lockdir._DEFAULT_TIMEOUT_SECONDS
+        def resetTimeout():
+            lockdir._DEFAULT_TIMEOUT_SECONDS = orig_timeout
+        self.addCleanup(resetTimeout)
+        lockdir._DEFAULT_TIMEOUT_SECONDS = 3
+
     def test_read_write(self):
         self.assertRaises(NoSuchFile, self.lockable.get, 'foo')
         self.assertRaises(NoSuchFile, self.lockable.get_utf8, 'foo')
@@ -122,6 +132,116 @@
             self.assertRaises(errors.LockBroken, self.lockable.unlock)
             self.assertFalse(self.lockable.is_locked())
 
+    def test_lock_write_returns_None_refuses_token(self):
+        token = self.lockable.lock_write()
+        try:
+            if token is not None:
+                # This test does not apply, because this lockable supports
+                # tokens.
+                return
+            self.assertRaises(errors.TokenLockingNotSupported,
+                              self.lockable.lock_write, token='token')
+        finally:
+            self.lockable.unlock()
+
+    def test_lock_write_raises_on_token_mismatch(self):
+        token = self.lockable.lock_write()
+        try:
+            if token is None:
+                # This test does not apply, because this lockable refuses
+                # tokens.
+                return
+            different_token = token + 'xxx'
+            # Re-using the same lockable instance with a different token will
+            # raise TokenMismatch.
+            self.assertRaises(errors.TokenMismatch,
+                              self.lockable.lock_write, token=different_token)
+            # A separate instance for the same lockable will also raise
+            # TokenMismatch.
+            # This detects the case where a caller claims to have a lock (via
+            # the token) for an external resource, but doesn't (the token is
+            # different).  Clients need a separate lock object to make sure the
+            # external resource is probed, whereas the existing lock object
+            # might cache.
+            new_lockable = self.get_lockable()
+            self.assertRaises(errors.TokenMismatch,
+                              new_lockable.lock_write, token=different_token)
+        finally:
+            self.lockable.unlock()
+
+    def test_lock_write_with_matching_token(self):
+        # If the token matches, no exception is raised by lock_write.
+        token = self.lockable.lock_write()
+        try:
+            if token is None:
+                # This test does not apply, because this lockable refuses
+                # tokens.
+                return
+            # The same instance will accept a second lock_write if the specified
+            # token matches.
+            self.lockable.lock_write(token=token)
+            self.lockable.unlock()
+            # Calling lock_write on a new instance for the same lockable will
+            # also succeed.
+            new_lockable = self.get_lockable()
+            new_lockable.lock_write(token=token)
+            new_lockable.unlock()
+        finally:
+            self.lockable.unlock()
+
+    def test_unlock_after_lock_write_with_token(self):
+        # If lock_write did not physically acquire the lock (because it was
+        # passed a token), then unlock should not physically release it.
+        token = self.lockable.lock_write()
+        try:
+            if token is None:
+                # This test does not apply, because this lockable refuses
+                # tokens.
+                return
+            new_lockable = self.get_lockable()
+            new_lockable.lock_write(token=token)
+            new_lockable.unlock()
+            self.assertTrue(self.lockable.get_physical_lock_status())
+        finally:
+            self.lockable.unlock()
+
+    def test_lock_write_with_token_fails_when_unlocked(self):
+        # Lock and unlock to get a superficially valid token.  This mimics a
+        # likely programming error, where a caller accidentally tries to lock
+        # with a token that is no longer valid (because the original lock was
+        # released).
+        token = self.lockable.lock_write()
+        self.lockable.unlock()
+        if token is None:
+            # This test does not apply, because this lockable refuses
+            # tokens.
+            return
+
+        self.assertRaises(errors.TokenMismatch,
+                          self.lockable.lock_write, token=token)
+
+    def test_lock_write_reenter_with_token(self):
+        token = self.lockable.lock_write()
+        try:
+            if token is None:
+                # This test does not apply, because this lockable refuses
+                # tokens.
+                return
+            # Relock with a token.
+            self.lockable.lock_write(token=token)
+            self.lockable.unlock()
+        finally:
+            self.lockable.unlock()
+        # The lock should be unlocked on disk.  Verify that with a new lock
+        # instance.
+        new_lockable = self.get_lockable()
+        # Calling lock_write now should work, rather than raise LockContention.
+        new_lockable.lock_write()
+        new_lockable.unlock()
+
+
+
+
 
 # This method of adapting tests to parameters is different to 
 # the TestProviderAdapters used elsewhere, but seems simpler for this 
@@ -130,7 +250,8 @@
                                       _TestLockableFiles_mixin):
 
     def setUp(self):
-        super(TestLockableFiles_TransportLock, self).setUp()
+        TestCaseInTempDir.setUp(self)
+        _TestLockableFiles_mixin.setUp(self)
         transport = get_transport('.')
         transport.mkdir('.bzr')
         self.sub_transport = transport.clone('.bzr')
@@ -152,7 +273,8 @@
     """LockableFile tests run with LockDir underneath"""
 
     def setUp(self):
-        super(TestLockableFiles_LockDir, self).setUp()
+        TestCaseInTempDir.setUp(self)
+        _TestLockableFiles_mixin.setUp(self)
         self.transport = get_transport('.')
         self.lockable = self.get_lockable()
         # the lock creation here sets mode - test_permissions on branch 

=== modified directory  // last-changed:andrew.bennetts at canonical.com-200702120
... 44932-k9keo85c0s9gg5wv
# revision id: andrew.bennetts at canonical.com-20070212044932-k9keo85c0s9gg5wv
# sha1: 220cf0a68dd81eca2b05ecbb804eca5288915b87
# inventory sha1: 47f91a0e4f6e74fc7d535953c5a4136a792f5915
# parent ids:
#   pqm at pqm.ubuntu.com-20070209195330-312ec52588462782
# base id: pqm at pqm.ubuntu.com-20070209195330-312ec52588462782
# properties:
#   branch-nick: bzr.dev.hpss.api.changes


