Rev 3536: cleanup a few imports to be lazily loaded. in http://bzr.arbash-meinel.com/branches/bzr/1.6-dev/lean_imports
John Arbash Meinel
john at arbash-meinel.com
Wed Jul 9 20:57:39 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.6-dev/lean_imports
------------------------------------------------------------
revno: 3536
revision-id: john at arbash-meinel.com-20080709195736-s9cg26gnym3lf2d0
parent: pqm at pqm.ubuntu.com-20080709135859-wq3r1d1fjcafelgw
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: lean_imports
timestamp: Wed 2008-07-09 14:57:36 -0500
message:
cleanup a few imports to be lazily loaded.
modified:
bzrlib/knit.py knit.py-20051212171256-f056ac8f0fbe1bd9
bzrlib/lockable_files.py control_files.py-20051111201905-bb88546e799d669f
bzrlib/log.py log.py-20050505065812-c40ce11702fe5fb1
bzrlib/lsprof.py lsprof.py-20051208071030-833790916798ceed
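The hunks below all follow bzrlib's lazy_import pattern: module-level imports are moved into a lazy_import() block so the modules are only actually imported on first attribute access, and call sites switch from bare names (topo_sort, GzipFile, Graph) to attribute access through the lazily loaded module (tsort.topo_sort, tuned_gzip.GzipFile, _mod_graph.Graph). A minimal sketch of that pattern, using a hypothetical helper function that is not part of this commit:

    # Minimal sketch of the lazy-import pattern this commit applies (Python 2,
    # matching bzrlib of this era). Modules named inside the string are bound
    # lazily and only really imported on first attribute access.
    from bzrlib.lazy_import import lazy_import
    lazy_import(globals(), """
    from bzrlib import (
        trace,
        tsort,
        )
    """)


    def sorted_keys(parent_map):
        # Hypothetical helper: both modules are imported here, on first use,
        # not when this file is loaded. Call sites must keep the attribute
        # access (tsort.topo_sort) rather than importing the name directly,
        # or the laziness is lost.
        keys = tsort.topo_sort(parent_map)
        trace.mutter('topologically sorted %d keys', len(keys))
        return keys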
=== modified file 'bzrlib/knit.py'
--- a/bzrlib/knit.py 2008-07-07 08:34:51 +0000
+++ b/bzrlib/knit.py 2008-07-09 19:57:36 +0000
@@ -64,33 +64,27 @@
from itertools import izip, chain
import operator
import os
-import urllib
-import sys
-import warnings
-from zlib import Z_DEFAULT_COMPRESSION
-import bzrlib
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
annotate,
+ debug,
+ diff,
graph as _mod_graph,
index as _mod_index,
lru_cache,
pack,
+ progress,
trace,
+ tsort,
+ tuned_gzip,
)
""")
from bzrlib import (
- cache_utf8,
- debug,
- diff,
errors,
osutils,
patiencediff,
- progress,
- merge,
- ui,
)
from bzrlib.errors import (
FileExists,
@@ -102,7 +96,6 @@
RevisionNotPresent,
RevisionAlreadyPresent,
)
-from bzrlib.graph import Graph
from bzrlib.osutils import (
contains_whitespace,
contains_linebreaks,
@@ -110,9 +103,6 @@
sha_strings,
split_lines,
)
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
-import bzrlib.ui
from bzrlib.versionedfile import (
AbsentContentFactory,
adapter_registry,
@@ -122,7 +112,6 @@
VersionedFile,
VersionedFiles,
)
-import bzrlib.weave
# TODO: Split out code specific to this format into an associated object.
@@ -1181,7 +1170,7 @@
global_map, parent_maps = self._get_parent_map_with_sources(keys)
if ordering == 'topological':
# Global topological sort
- present_keys = topo_sort(global_map)
+ present_keys = tsort.topo_sort(global_map)
# Now group by source:
source_keys = []
current_source = None
@@ -1495,7 +1484,7 @@
:return: the header and the decompressor stream.
as (stream, header_record)
"""
- df = GzipFile(mode='rb', fileobj=StringIO(raw_data))
+ df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
try:
# Current serialise
rec = self._check_header(key, df.readline())
@@ -1510,7 +1499,7 @@
# 4168 calls in 2880 217 internal
# 4168 calls to _parse_record_header in 2121
# 4168 calls to readlines in 330
- df = GzipFile(mode='rb', fileobj=StringIO(data))
+ df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
try:
record_contents = df.readlines()
except Exception, e:
@@ -1611,7 +1600,7 @@
'data must be plain bytes was %s' % type(bytes))
if lines and lines[-1][-1] != '\n':
raise ValueError('corrupt lines value %r' % lines)
- compressed_bytes = bytes_to_gzip(bytes)
+ compressed_bytes = tuned_gzip.bytes_to_gzip(bytes)
return len(compressed_bytes), compressed_bytes
def _split_header(self, line):
@@ -2698,7 +2687,7 @@
# TODO: this code generates a parent maps of present ancestors; it
# could be split out into a separate method, and probably should use
# iter_ancestry instead. -- mbp and robertc 20080704
- graph = Graph(self._knit)
+ graph = _mod_graph.Graph(self._knit)
head_cache = _mod_graph.FrozenHeadsCache(graph)
search = graph._make_breadth_first_searcher([key])
keys = set()
=== modified file 'bzrlib/lockable_files.py'
--- a/bzrlib/lockable_files.py 2008-05-22 05:48:22 +0000
+++ b/bzrlib/lockable_files.py 2008-07-09 19:57:36 +0000
@@ -15,23 +15,28 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from cStringIO import StringIO
+
+from bzrlib.lazy_import import lazy_import
+lazy_import(globals(), """
import codecs
-#import traceback
-from warnings import warn
-
-import bzrlib
-from bzrlib.decorators import (needs_read_lock,
- needs_write_lock)
-import bzrlib.errors as errors
-from bzrlib.errors import BzrError
-from bzrlib.osutils import file_iterator, safe_unicode
+import warnings
+
+from bzrlib import (
+ errors,
+ osutils,
+ transactions,
+ urlutils,
+ )
+""")
+
+from bzrlib.decorators import (
+ needs_read_lock,
+ needs_write_lock,
+ )
from bzrlib.symbol_versioning import (
deprecated_in,
deprecated_method,
)
-from bzrlib.trace import mutter, note
-import bzrlib.transactions as transactions
-import bzrlib.urlutils as urlutils
# XXX: The tracking here of lock counts and whether the lock is held is
@@ -108,7 +113,7 @@
if self.is_locked():
# do not automatically unlock; there should have been a
# try/finally to unlock this.
- warn("%r was gc'd while locked" % self)
+ warnings.warn("%r was gc'd while locked" % self)
def break_lock(self):
"""Break the lock of this lockable files group if it is held.
@@ -122,7 +127,7 @@
file_or_path = '/'.join(file_or_path)
if file_or_path == '':
return u''
- return urlutils.escape(safe_unicode(file_or_path))
+ return urlutils.escape(osutils.safe_unicode(file_or_path))
def _find_modes(self):
"""Determine the appropriate modes for files and directories.
@@ -241,7 +246,6 @@
some other way, and need to synchronise this object's state with that
fact.
"""
- # mutter("lock write: %s (%s)", self, self._lock_count)
# TODO: Upgrade locking to support using a Transport,
# and potentially a remote locking protocol
if self._lock_mode:
@@ -252,7 +256,6 @@
return self._token_from_lock
else:
token_from_lock = self._lock.lock_write(token=token)
- #note('write locking %s', self)
#traceback.print_stack()
self._lock_mode = 'w'
self._lock_count = 1
@@ -261,14 +264,12 @@
return token_from_lock
def lock_read(self):
- # mutter("lock read: %s (%s)", self, self._lock_count)
if self._lock_mode:
if self._lock_mode not in ('r', 'w'):
raise ValueError("invalid lock mode %r" % (self._lock_mode,))
self._lock_count += 1
else:
self._lock.lock_read()
- #note('read locking %s', self)
#traceback.print_stack()
self._lock_mode = 'r'
self._lock_count = 1
@@ -277,13 +278,11 @@
self.get_transaction().set_cache_size(5000)
def unlock(self):
- # mutter("unlock: %s (%s)", self, self._lock_count)
if not self._lock_mode:
raise errors.LockNotHeld(self)
if self._lock_count > 1:
self._lock_count -= 1
else:
- #note('unlocking %s', self)
#traceback.print_stack()
self._finish_transaction()
try:
=== modified file 'bzrlib/log.py'
--- a/bzrlib/log.py 2008-07-07 09:15:41 +0000
+++ b/bzrlib/log.py 2008-07-09 19:57:36 +0000
@@ -59,31 +59,28 @@
warn,
)
+from bzrlib.lazy_import import lazy_import
+lazy_import(globals(), """
+
from bzrlib import (
config,
- lazy_regex,
+ errors,
+ repository as _mod_repository,
+ revision as _mod_revision,
+ revisionspec,
+ trace,
+ tsort,
+ )
+""")
+
+from bzrlib import (
registry,
)
-from bzrlib.errors import (
- BzrCommandError,
- )
from bzrlib.osutils import (
format_date,
get_terminal_encoding,
terminal_width,
)
-from bzrlib.repository import _strip_NULL_ghosts
-from bzrlib.revision import (
- NULL_REVISION,
- )
-from bzrlib.revisionspec import (
- RevisionInfo,
- )
-from bzrlib.trace import mutter
-from bzrlib.tsort import (
- merge_sort,
- topo_sort,
- )
def find_touching_revisions(branch, file_id):
@@ -204,7 +201,7 @@
warn("not a LogFormatter instance: %r" % lf)
if specific_fileid:
- mutter('get log for file_id %r', specific_fileid)
+ trace.mutter('get log for file_id %r', specific_fileid)
generate_merge_revisions = getattr(lf, 'supports_merge_revisions', False)
allow_single_merge_revision = getattr(lf,
'supports_single_merge_revision', False)
@@ -265,8 +262,8 @@
generate_single_revision = ((start_rev_id == end_rev_id)
and allow_single_merge_revision)
if not generate_single_revision:
- raise BzrCommandError('Selected log formatter only supports '
- 'mainline revisions.')
+ raise errors.BzrCommandError('Selected log formatter only supports'
+ ' mainline revisions.')
generate_merge_revisions = generate_single_revision
view_revs_iter = get_view_revisions(mainline_revs, rev_nos, branch,
direction, include_merges=generate_merge_revisions)
@@ -349,7 +346,7 @@
if start_revision is None:
start_revno = 1
else:
- if isinstance(start_revision, RevisionInfo):
+ if isinstance(start_revision, revisionspec.RevisionInfo):
start_rev_id = start_revision.rev_id
start_revno = start_revision.revno or 1
else:
@@ -360,19 +357,19 @@
if end_revision is None:
end_revno = branch_revno
else:
- if isinstance(end_revision, RevisionInfo):
+ if isinstance(end_revision, revisionspec.RevisionInfo):
end_rev_id = end_revision.rev_id
end_revno = end_revision.revno or branch_revno
else:
branch.check_real_revno(end_revision)
end_revno = end_revision
- if ((start_rev_id == NULL_REVISION)
- or (end_rev_id == NULL_REVISION)):
- raise BzrCommandError('Logging revision 0 is invalid.')
+ if ((start_rev_id == _mod_revision.NULL_REVISION)
+ or (end_rev_id == _mod_revision.NULL_REVISION)):
+ raise errors.BzrCommandError('Logging revision 0 is invalid.')
if start_revno > end_revno:
- raise BzrCommandError("Start revision must be older than "
- "the end revision.")
+ raise errors.BzrCommandError("Start revision must be older than "
+ "the end revision.")
if end_revno < start_revno:
return None, None, None, None
@@ -474,7 +471,7 @@
# don't request it.
parent_map = dict(((key, value) for key, value in
graph.iter_ancestry(mainline_revisions[1:]) if value is not None))
- sorted_rev_list = topo_sort(parent_map.items())
+ sorted_rev_list = tsort.topo_sort(parent_map.items())
text_keys = [(file_id, rev_id) for rev_id in sorted_rev_list]
modified_text_versions = branch.repository.texts.get_parent_map(text_keys)
ancestry = {}
@@ -536,8 +533,8 @@
parent_map = dict(((key, value) for key, value in
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
# filter out ghosts; merge_sort errors on ghosts.
- rev_graph = _strip_NULL_ghosts(parent_map)
- merge_sorted_revisions = merge_sort(
+ rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
+ merge_sorted_revisions = tsort.merge_sort(
rev_graph,
mainline_revs[-1],
mainline_revs,
@@ -815,7 +812,7 @@
try:
return log_formatter_registry.make_formatter(name, *args, **kwargs)
except KeyError:
- raise BzrCommandError("unknown log formatter: %r" % name)
+ raise errors.BzrCommandError("unknown log formatter: %r" % name)
def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
=== modified file 'bzrlib/lsprof.py'
--- a/bzrlib/lsprof.py 2007-10-16 18:26:12 +0000
+++ b/bzrlib/lsprof.py 2008-07-09 19:57:36 +0000
@@ -11,9 +11,6 @@
from _lsprof import Profiler, profiler_entry
-import bzrlib.osutils
-
-
__all__ = ['profile', 'Stats']
_g_threadmap = {}
@@ -131,11 +128,11 @@
otherwise the format is given by the filename extension.
"""
if format is None:
- basename = bzrlib.osutils.basename(filename)
+ basename = os.path.basename(filename)
if basename.startswith('callgrind.out'):
format = "callgrind"
else:
- ext = bzrlib.osutils.splitext(filename)[1]
+ ext = os.path.splitext(filename)[1]
if len(ext) > 1:
format = ext[1:]
outfile = open(filename, 'wb')
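The payoff of hunks like these is that modules such as tuned_gzip, tsort and progress stop being imported merely because bzrlib.knit was. A rough way to check, assuming nothing else on the import path still pulls those modules in eagerly:

    # Rough check of the lazy loading (Python 2). Any module that something
    # else imports eagerly will still show up as loaded here, even though
    # knit.py itself now defers it.
    import sys

    import bzrlib.knit

    for name in ('bzrlib.tuned_gzip', 'bzrlib.tsort', 'bzrlib.progress'):
        print '%s loaded: %s' % (name, name in sys.modules)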