Rev 3722: Clean out the debugging code. in http://bzr.arbash-meinel.com/branches/bzr/1.8-dev/lighter_log_file
John Arbash Meinel
john at arbash-meinel.com
Thu Sep 18 21:05:31 BST 2008
At http://bzr.arbash-meinel.com/branches/bzr/1.8-dev/lighter_log_file
------------------------------------------------------------
revno: 3722
revision-id: john at arbash-meinel.com-20080918200531-woiund7811i2kgyy
parent: john at arbash-meinel.com-20080918200316-v76fi3cotgu3qfun
committer: John Arbash Meinel <john at arbash-meinel.com>
branch nick: lighter_log_file
timestamp: Thu 2008-09-18 15:05:31 -0500
message:
Clean out the debugging code.
-------------- next part --------------
=== modified file 'bzrlib/log.py'
--- a/bzrlib/log.py 2008-09-18 20:03:16 +0000
+++ b/bzrlib/log.py 2008-09-18 20:05:31 +0000
@@ -55,7 +55,6 @@
)
import re
import sys
-import time
from warnings import (
warn,
)
@@ -557,7 +556,6 @@
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
"""
- tstart = time.time()
# find all the revisions that change the specific file
# build the ancestry of each revision in the graph
# - only listing the ancestors that change the specific file.
@@ -570,7 +568,6 @@
parent_map = dict(((key, value) for key, value in
graph.iter_ancestry(mainline_revisions[1:]) if value is not None))
sorted_rev_list = tsort.topo_sort(parent_map)
- tparent_map = time.time()
text_keys = [(file_id, rev_id) for rev_id in sorted_rev_list]
# Do a direct lookup of all possible text keys, and figure out which ones
# are actually present, and then convert it back to revision_ids, since the
@@ -579,7 +576,6 @@
modified_text_revisions = set(key[1] for key in text_parent_map)
del text_parent_map
del text_keys
- tmodified_text_revisions = time.time()
# Algorithms tried:
# a single dictionary mapping tree_revision_id => file_ancestry
# file_ancestry_as_tuple 50.3 272MB
@@ -600,7 +596,6 @@
# ancestry_values contains a pointer from a revision_id to either a tuple,
# or a frozenset() of a given per-file ancestry.
ancestry_values = {_mod_revision.NULL_REVISION: frozenset()}
- _hits = [0, 0, 0, 0, 0, 0, 0]
for rev in sorted_rev_list:
parents = parent_map[rev]
rev_ancestry = None
@@ -627,7 +622,6 @@
if parent_ancestry is rev_ancestry:
# They both point to the same ancestry value, so we know
# there is nothing new
- _hits[5] += 1
continue
parent_ancestry = frozenset(parent_ancestry)
new_revisions = parent_ancestry.difference(rev_ancestry)
@@ -636,7 +630,6 @@
# list, because we are only adding the 'new_revisions', so
# we know that we won't have duplicates.
if not isinstance(rev_ancestry, list):
- _hits[6] += 1
rev_ancestry = list(rev_ancestry)
rev_ancestry.extend(new_revisions)
if rev_ancestry is None:
@@ -658,44 +651,24 @@
rev_ancestry = frozenset(rev_ancestry)
ancestry_values[rev] = rev_ancestry
- trev_ancestry = time.time()
-
def is_merging_rev(r):
- _hits[0] = _hits[0] + 1
parents = parent_map[r]
if len(parents) > 1:
- _hits[1] += 1
leftparent = parents[0]
left_ancestry = ancestry_values[leftparent]
for rightparent in parents[1:]:
right_ancestry = ancestry_values[rightparent]
if left_ancestry is right_ancestry:
- _hits[2] += 1
continue
- _hits[3] += 1
left_ancestry = frozenset(left_ancestry)
if not left_ancestry.issuperset(right_ancestry):
- _hits[4] += 1
return True
return False
- trace.note('Found %s nodes, and %s unique values for %s view_revs'
- ' modified_text_revisions: %s',
- 0, len(ancestry_values),
- len(view_revs_iter), len(modified_text_revisions))
-
- # filter from the view the revisions that did not change or merge
+ # filter from the view the revisions that did not change or merge
# the specific file
result = [(r, n, d) for r, n, d in view_revs_iter
if r in modified_text_revisions or is_merging_rev(r)]
- tresult = time.time()
- trace.note('Hits: %s', _hits)
- trace.note('Timing: parent_map: %.3fs mod text revs: %.3fs'
- ' rev_ancestry: %.3fs result: %.3fs',
- tparent_map - tstart,
- tmodified_text_revisions - tparent_map,
- trev_ancestry - tmodified_text_revisions,
- tresult - trev_ancestry)
return result
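
For reference, the instrumentation removed above was a set of one-off time.time() snapshots (tstart, tparent_map, and so on) reported through trace.note(). Below is a minimal sketch of how the same per-phase timing could be collected with a reusable helper instead of scattered local variables; the timed() context manager and the timings dict are hypothetical names used purely for illustration and are not part of bzrlib or of this patch.

import time
from contextlib import contextmanager

@contextmanager
def timed(label, timings):
    # Accumulate the wall-clock time spent in the enclosed block under `label`.
    start = time.time()
    try:
        yield
    finally:
        timings[label] = timings.get(label, 0.0) + (time.time() - start)

timings = {}
with timed('parent_map', timings):
    sorted(range(100000))   # stand-in for the parent_map / topo_sort phase
with timed('result', timings):
    sum(range(100000))      # stand-in for building the filtered result list
print(timings)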
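The logic that stays behind decides whether a merge revision brought in per-file history by comparing frozenset ancestries, as is_merging_rev does with left_ancestry.issuperset(right_ancestry). A standalone sketch of that check follows, with made-up revision ids and a hypothetical helper name chosen only for illustration.

# Per-file ancestry sets keyed by revision id, mirroring the shape of the
# ancestry_values dict built in the patched function (example data only).
ancestry_values = {
    'left-parent':  frozenset(['rev-a', 'rev-b', 'rev-c']),
    'right-parent': frozenset(['rev-a', 'rev-d']),
}

def brings_in_new_file_revisions(left_id, right_id, ancestry_values):
    # True when the right-hand parent's per-file ancestry contains revisions
    # that the left-hand parent's ancestry does not already include.
    left = ancestry_values[left_id]
    right = ancestry_values[right_id]
    if left is right:
        # Same ancestry object on both sides, so nothing new was merged.
        return False
    return not left.issuperset(right)

# 'rev-d' is only reachable through the right-hand parent, so this is True.
print(brings_in_new_file_revisions('left-parent', 'right-parent', ancestry_values))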