43
45
import bzrlib.branch
46
47
import bzrlib.errors
47
import bzrlib.lru_cache
48
48
import bzrlib.progress
49
49
import bzrlib.revision
50
import bzrlib.textfile
51
50
import bzrlib.tsort
54
53
# bzrlib's UIFactory is not thread-safe, so give each thread its own
# progress-bar stack via this thread-local holder.
uihack = threading.local()
58
56
class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack lives in thread-local
    storage, so concurrent requests do not share (and clobber) state.
    """

    def nested_progress_bar(self):
        """Return a nested (dummy) progress bar from this thread's stack.

        The stack is created lazily, once per thread; recreating it on
        every call would discard any bars already pushed on it.
        """
        if getattr(uihack, '_progress_bar_stack', None) is None:
            pbs = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
            uihack._progress_bar_stack = pbs
        return uihack._progress_bar_stack.get_nested()

# Install the thread-safe factory as bzrlib's global UI factory.
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair buffered delete/insert entries into side-by-side rows.

    Each entry in `delete_list`/`insert_list` is a (lineno, line, type)
    tuple.  Rows are appended to `line_list` as util.Container objects
    with old_*/new_* attributes.  Both buffers are consumed (emptied)
    by this call.
    """
    # Pad the shorter buffer with blank 'context' entries so the two
    # columns line up row-for-row.
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    # Zip the equal-length buffers into rows.  Iterating with zip avoids
    # the O(n^2) cost of repeated list.pop(0).
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Preserve the original contract of leaving both buffers empty.
    del delete_list[:]
    del insert_list[:]
78
def _make_side_by_side(chunk_list):
    """Turn a normal unified-style diff (post-processed by parse_delta)
    into a side-by-side diff structure.  The new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    # NOTE(review): the initialisations below were reconstructed from a
    # damaged chunk -- confirm against upstream loggerhead.
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        wrap_char = '<wbr/>'
        delete_list, insert_list = [], []
        for line in chunk.diff:
            # Add <wbr/> every 78 characters so we can wrap properly.
            wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
            wrap_lines = [util.html_clean(_line) for _line in wrap_line]
            wrapped_line = wrap_char.join(wrap_lines)

            if line.type == 'context':
                # A context row ends any pending delete/insert run:
                # flush the buffers into paired side-by-side rows first.
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(old_lineno=line.old_lineno,
                                                new_lineno=line.new_lineno,
                                                old_line=wrapped_line,
                                                new_line=wrapped_line,
                                                type='context'))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, wrapped_line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, wrapped_line, line.type))
        # Flush any trailing delete/insert run at the end of the chunk.
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
69
125
def is_branch(folder):
    """Return True if `folder` can be opened as a bzr branch."""
    # NOTE(review): the try/except frame was missing from the damaged
    # chunk; reconstructed.  Exception (not a bare except) is caught so
    # KeyboardInterrupt/SystemExit still propagate.
    try:
        bzrlib.branch.Branch.open(folder)
        return True
    except Exception:
        return False
123
178
def __getitem__(self, index):
    """Return the commit timestamp of the index'd revision as a datetime.

    Looks up self.revid_list[index] in self.repository; used so bisect
    can binary-search the (reverse-date-ordered) revision list by date.
    """
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
128
182
def __len__(self):
    """Return the number of revisions in the wrapped revid list."""
    return len(self.revid_list)
131
class FileChangeReporter(object):
    """Accumulate file changes between two inventories.

    Intended as the reporter callback target for bzrlib's change
    iteration: collects util.Container records for added, removed,
    renamed and modified files, plus text_changes entries carrying the
    old/new revision of every file whose text changed.
    """

    def __init__(self, old_inv, new_inv):
        # NOTE(review): the added/modified/renamed/removed list
        # initialisations were missing from the damaged chunk and are
        # reconstructed; report() below appends to all of them.
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        """Return the revision that last touched `file_id` in `inv`,
        or the null revision marker if the file is absent there."""
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        """Record one change (callback per changed file).

        NOTE(review): the tail of this signature and the else-branches
        were reconstructed to match bzrlib's report_changes callback
        contract -- confirm against upstream.
        """
        if modified not in ('unchanged', 'kind changed'):
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id, kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind),
                file_id=file_id, kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                file_id=file_id,
                text_modified=modified == 'modified'))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id))
178
class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # `cache` is any mapping with .get()/__setitem__ (e.g. an LRUCache).
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        cached = self._cache.get(key)
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        # Stale: stored under a different revid for this branch.
        return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get()."""
        self._cache[key] = (revid, data)
215
186
class History (object):
216
187
"""Decorate a branch to provide information for rendering.
220
191
around it, serve the request, throw the History object away, unlock the
221
192
branch and throw it away.
223
:ivar _file_change_cache: An object that caches information about the
224
files that changed between two revisions.
225
:ivar _rev_info: A list of information about revisions. This is by far
226
the most cryptic data structure in loggerhead. At the top level, it
227
is a list of 3-tuples [(merge-info, where-merged, parents)].
228
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
229
like a merged sorted list, but the revno is stringified.
230
`where-merged` is a tuple of revisions that have this revision as a
231
non-lefthand parent. Finally, `parents` is just the usual list of
232
parents of this revision.
233
:ivar _rev_indices: A dictionary mapping each revision id to the index of
234
the information about it in _rev_info.
235
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
194
:ivar _file_change_cache: xx
239
def _load_whole_history_data(self, caches, cache_key):
240
"""Set the attributes relating to the whole history of the branch.
242
:param caches: a list of caches with interfaces like
243
`RevInfoMemoryCache` and be ordered from fastest to slowest.
244
:param cache_key: the key to use with the caches.
246
self._rev_indices = None
247
self._rev_info = None
250
def update_missed_caches():
251
for cache in missed_caches:
252
cache.set(cache_key, self.last_revid, self._rev_info)
254
data = cache.get(cache_key, self.last_revid)
256
self._rev_info = data
257
update_missed_caches()
260
missed_caches.append(cache)
262
whole_history_data = compute_whole_history_data(self._branch)
263
self._rev_info, self._rev_indices = whole_history_data
264
update_missed_caches()
266
if self._rev_indices is not None:
267
self._revno_revid = {}
268
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
269
self._revno_revid[revno_str] = revid
271
self._revno_revid = {}
272
self._rev_indices = {}
273
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
274
self._rev_indices[revid] = seq
275
self._revno_revid[revno_str] = revid
277
def __init__(self, branch, whole_history_data_cache, file_cache=None,
278
revinfo_disk_cache=None, cache_key=None):
197
def __init__(self, branch, whole_history_data_cache):
279
198
assert branch.is_locked(), (
280
199
"Can only construct a History object with a read-locked branch.")
281
if file_cache is not None:
282
self._file_change_cache = file_cache
283
file_cache.history = self
285
self._file_change_cache = None
200
self._file_change_cache = None
286
201
self._branch = branch
287
self._inventory_cache = {}
288
self._branch_nick = self._branch.get_config().get_nickname()
289
self.log = logging.getLogger('loggerhead.%s' % self._branch_nick)
202
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
291
204
self.last_revid = branch.last_revision()
293
caches = [RevInfoMemoryCache(whole_history_data_cache)]
294
if revinfo_disk_cache:
295
caches.append(revinfo_disk_cache)
296
self._load_whole_history_data(caches, cache_key)
206
whole_history_data = whole_history_data_cache.get(self.last_revid)
207
if whole_history_data is None:
208
whole_history_data = compute_whole_history_data(branch)
209
whole_history_data_cache[self.last_revid] = whole_history_data
211
(self._revision_graph, self._full_history, self._revision_info,
212
self._revno_revid, self._merge_sort, self._where_merged
213
) = whole_history_data
215
def use_file_cache(self, cache):
216
self._file_change_cache = cache
299
219
def has_revisions(self):
335
252
if introduced_revisions(revid) & revid_set:
337
parents = self._rev_info[self._rev_indices[revid]][2]
254
parents = self._revision_graph[revid]
338
255
if len(parents) == 0:
340
257
revid = parents[0]
342
259
def get_short_revision_history_by_fileid(self, file_id):
260
# wow. is this really the only way we can get this list? by
261
# man-handling the weave store directly? :-0
343
262
# FIXME: would be awesome if we could get, for a folder, the list of
344
# revisions where items within that folder changed.i
346
# FIXME: Workaround for bzr versions prior to 1.6b3.
347
# Remove me eventually pretty please :)
348
w = self._branch.repository.weave_store.get_weave(
349
file_id, self._branch.repository.get_transaction())
350
w_revids = w.versions()
351
revids = [r for r in self._rev_indices if r in w_revids]
352
except AttributeError:
353
possible_keys = [(file_id, revid) for revid in self._rev_indices]
354
get_parent_map = self._branch.repository.texts.get_parent_map
355
# We chunk the requests as this works better with GraphIndex.
356
# See _filter_revisions_touching_file_id in bzrlib/log.py
357
# for more information.
360
for start in xrange(0, len(possible_keys), chunk_size):
361
next_keys = possible_keys[start:start + chunk_size]
362
revids += [k[1] for k in get_parent_map(next_keys)]
363
del possible_keys, next_keys
263
# revisions where items within that folder changed.
264
possible_keys = [(file_id, revid) for revid in self._full_history]
265
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
266
return [revid for _, revid in existing_keys.iterkeys()]
366
268
def get_revision_history_since(self, revid_list, date):
367
269
# if a user asks for revisions starting at 01-sep, they mean inclusive,
368
270
# so start at midnight on 02-sep.
369
271
date = date + datetime.timedelta(days=1)
370
# our revid list is sorted in REVERSE date order,
371
# so go thru some hoops here...
272
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
372
273
revid_list.reverse()
373
index = bisect.bisect(_RevListToTimestamps(revid_list,
374
self._branch.repository),
274
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
378
277
revid_list.reverse()
394
292
# all the relevant changes (time-consuming) only to return a list of
395
293
# revids which will be used to fetch a set of changes again.
397
# if they entered a revid, just jump straight there;
398
# ignore the passed-in revid_list
295
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
399
296
revid = self.fix_revid(query)
400
297
if revid is not None:
401
298
if isinstance(revid, unicode):
402
299
revid = revid.encode('utf-8')
403
changes = self.get_changes([revid])
300
changes = self.get_changes([ revid ])
404
301
if (changes is not None) and (len(changes) > 0):
408
305
m = self.us_date_re.match(query)
409
306
if m is not None:
410
date = datetime.datetime(util.fix_year(int(m.group(3))),
307
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
414
309
m = self.earth_date_re.match(query)
415
310
if m is not None:
416
date = datetime.datetime(util.fix_year(int(m.group(3))),
311
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
420
313
m = self.iso_date_re.match(query)
421
314
if m is not None:
422
date = datetime.datetime(util.fix_year(int(m.group(1))),
315
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
425
316
if date is not None:
426
317
if revid_list is None:
427
# if no limit to the query was given,
428
# search only the direct-parent path.
318
# if no limit to the query was given, search only the direct-parent path.
429
319
revid_list = list(self.get_revids_from(None, self.last_revid))
430
320
return self.get_revision_history_since(revid_list, date)
580
464
revnol = revno.split(".")
581
465
revnos = ".".join(revnol[:-2])
582
466
revnolast = int(revnol[-1])
583
if revnos in d.keys():
467
if d.has_key(revnos):
585
469
if revnolast < m:
586
d[revnos] = (revnolast, revid)
470
d[revnos] = ( revnolast, revid )
588
d[revnos] = (revnolast, revid)
590
return [d[revnos][1] for revnos in d.keys()]
592
def add_branch_nicks(self, change):
472
d[revnos] = ( revnolast, revid )
474
return [ d[revnos][1] for revnos in d.keys() ]
476
def get_branch_nicks(self, changes):
594
given a 'change', fill in the branch nicks on all parents and merge
478
given a list of changes from L{get_changes}, fill in the branch nicks
479
on all parents and merge points.
597
481
fetch_set = set()
598
for p in change.parents:
599
fetch_set.add(p.revid)
600
for p in change.merge_points:
601
fetch_set.add(p.revid)
482
for change in changes:
483
for p in change.parents:
484
fetch_set.add(p.revid)
485
for p in change.merge_points:
486
fetch_set.add(p.revid)
602
487
p_changes = self.get_changes(list(fetch_set))
603
488
p_change_dict = dict([(c.revid, c) for c in p_changes])
604
for p in change.parents:
605
if p.revid in p_change_dict:
606
p.branch_nick = p_change_dict[p.revid].branch_nick
608
p.branch_nick = '(missing)'
609
for p in change.merge_points:
610
if p.revid in p_change_dict:
611
p.branch_nick = p_change_dict[p.revid].branch_nick
613
p.branch_nick = '(missing)'
489
for change in changes:
490
# arch-converted branches may not have merged branch info :(
491
for p in change.parents:
492
if p.revid in p_change_dict:
493
p.branch_nick = p_change_dict[p.revid].branch_nick
495
p.branch_nick = '(missing)'
496
for p in change.merge_points:
497
if p.revid in p_change_dict:
498
p.branch_nick = p_change_dict[p.revid].branch_nick
500
p.branch_nick = '(missing)'
615
502
def get_changes(self, revid_list):
616
503
"""Return a list of changes objects for the given revids.
656
539
return [self._change_from_revision(rev) for rev in rev_list]
541
def _get_deltas_for_revisions_with_trees(self, revisions):
542
"""Produce a list of revision deltas.
544
Note that the input is a sequence of REVISIONS, not revision_ids.
545
Trees will be held in memory until the generator exits.
546
Each delta is relative to the revision's lefthand predecessor.
547
(This is copied from bzrlib.)
549
required_trees = set()
550
for revision in revisions:
551
required_trees.add(revision.revid)
552
required_trees.update([p.revid for p in revision.parents[:1]])
553
trees = dict((t.get_revision_id(), t) for
554
t in self._branch.repository.revision_trees(required_trees))
556
self._branch.repository.lock_read()
558
for revision in revisions:
559
if not revision.parents:
560
old_tree = self._branch.repository.revision_tree(
561
bzrlib.revision.NULL_REVISION)
563
old_tree = trees[revision.parents[0].revid]
564
tree = trees[revision.revid]
565
ret.append(tree.changes_from(old_tree))
568
self._branch.repository.unlock()
658
570
def _change_from_revision(self, revision):
660
572
Given a bzrlib Revision, return a processed "change" for use in
663
575
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
665
parents = [util.Container(revid=r,
666
revno=self.get_revno(r)) for r in revision.parent_ids]
577
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
668
579
message, short_message = clean_message(revision.message)
671
authors = revision.get_apparent_authors()
672
except AttributeError:
673
authors = [revision.get_apparent_author()]
676
582
'revid': revision.revision_id,
677
583
'date': commit_time,
584
'author': revision.get_apparent_author(),
679
585
'branch_nick': revision.properties.get('branch-nick', None),
680
586
'short_comment': short_message,
681
587
'comment': revision.message,
682
588
'comment_clean': [util.html_clean(s) for s in message],
683
589
'parents': revision.parent_ids,
684
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
686
591
return util.Container(entry)
688
def get_file_changes_uncached(self, entry):
689
repo = self._branch.repository
691
old_revid = entry.parents[0].revid
693
old_revid = bzrlib.revision.NULL_REVISION
694
return self.file_changes_for_revision_ids(old_revid, entry.revid)
696
def get_file_changes(self, entry):
593
def get_file_changes_uncached(self, entries):
594
delta_list = self._get_deltas_for_revisions_with_trees(entries)
596
return [self.parse_delta(delta) for delta in delta_list]
598
def get_file_changes(self, entries):
697
599
if self._file_change_cache is None:
698
return self.get_file_changes_uncached(entry)
600
return self.get_file_changes_uncached(entries)
700
return self._file_change_cache.get_file_changes(entry)
702
def add_changes(self, entry):
703
changes = self.get_file_changes(entry)
704
entry.changes = changes
602
return self._file_change_cache.get_file_changes(entries)
604
def add_changes(self, entries):
605
changes_list = self.get_file_changes(entries)
607
for entry, changes in zip(entries, changes_list):
608
entry.changes = changes
610
def get_change_with_diff(self, revid, compare_revid=None):
611
change = self.get_changes([revid])[0]
613
if compare_revid is None:
615
compare_revid = change.parents[0].revid
617
compare_revid = 'null:'
619
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
620
rev_tree2 = self._branch.repository.revision_tree(revid)
621
delta = rev_tree2.changes_from(rev_tree1)
623
change.changes = self.parse_delta(delta)
624
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
706
628
def get_file(self, file_id, revid):
707
629
"returns (path, filename, data)"
713
635
path = '/' + path
714
636
return path, inv_entry.name, rev_tree.get_file_text(file_id)
716
def file_changes_for_revision_ids(self, old_revid, new_revid):
638
def _parse_diffs(self, old_tree, new_tree, delta):
640
Return a list of processed diffs, in the format::
649
type: str('context', 'delete', or 'insert'),
658
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
660
process.append((old_path, new_path, fid, kind))
661
for path, fid, kind, text_modified, meta_modified in delta.modified:
662
process.append((path, path, fid, kind))
664
for old_path, new_path, fid, kind in process:
665
old_lines = old_tree.get_file_lines(fid)
666
new_lines = new_tree.get_file_lines(fid)
668
if old_lines != new_lines:
670
bzrlib.diff.internal_diff(old_path, old_lines,
671
new_path, new_lines, buffer)
672
except bzrlib.errors.BinaryFile:
675
diff = buffer.getvalue()
678
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
682
def _process_diff(self, diff):
683
# doesn't really need to be a method; could be static.
686
for line in diff.splitlines():
689
if line.startswith('+++ ') or line.startswith('--- '):
691
if line.startswith('@@ '):
693
if chunk is not None:
695
chunk = util.Container()
697
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
698
old_lineno = lines[0]
699
new_lineno = lines[1]
700
elif line.startswith(' '):
701
chunk.diff.append(util.Container(old_lineno=old_lineno,
702
new_lineno=new_lineno,
707
elif line.startswith('+'):
708
chunk.diff.append(util.Container(old_lineno=None,
709
new_lineno=new_lineno,
710
type='insert', line=line[1:]))
712
elif line.startswith('-'):
713
chunk.diff.append(util.Container(old_lineno=old_lineno,
715
type='delete', line=line[1:]))
718
chunk.diff.append(util.Container(old_lineno=None,
722
if chunk is not None:
726
def parse_delta(self, delta):
718
728
Return a nested data structure containing the changes in a delta::
727
text_changes: list((filename, file_id)),
729
repo = self._branch.repository
730
if bzrlib.revision.is_null(old_revid) or \
731
bzrlib.revision.is_null(new_revid):
732
old_tree, new_tree = map(
733
repo.revision_tree, [old_revid, new_revid])
735
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
737
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
739
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
741
return util.Container(
742
added=sorted(reporter.added, key=lambda x:x.filename),
743
renamed=sorted(reporter.renamed, key=lambda x:x.new_filename),
744
removed=sorted(reporter.removed, key=lambda x:x.filename),
745
modified=sorted(reporter.modified, key=lambda x:x.filename),
746
text_changes=sorted(reporter.text_changes, key=lambda x:x.filename))
743
for path, fid, kind in delta.added:
744
added.append((rich_filename(path, kind), fid))
746
for path, fid, kind, text_modified, meta_modified in delta.modified:
747
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
749
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
750
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
751
if meta_modified or text_modified:
752
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
754
for path, fid, kind in delta.removed:
755
removed.append((rich_filename(path, kind), fid))
757
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
760
def add_side_by_side(changes):
761
# FIXME: this is a rotten API.
762
for change in changes:
763
for m in change.changes.modified:
764
m.sbs_chunks = _make_side_by_side(m.chunks)
766
def get_filelist(self, inv, file_id, sort_type=None):
768
return the list of all files (and their attributes) within a given
772
dir_ie = inv[file_id]
773
path = inv.id2path(file_id)
778
for filename, entry in dir_ie.children.iteritems():
779
revid_set.add(entry.revision)
782
for change in self.get_changes(list(revid_set)):
783
change_dict[change.revid] = change
785
for filename, entry in dir_ie.children.iteritems():
787
if entry.kind == 'directory':
790
revid = entry.revision
792
file = util.Container(
793
filename=filename, executable=entry.executable, kind=entry.kind,
794
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
795
revid=revid, change=change_dict[revid])
796
file_list.append(file)
798
if sort_type == 'filename' or sort_type is None:
799
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
800
elif sort_type == 'size':
801
file_list.sort(key=lambda x: x.size)
802
elif sort_type == 'date':
803
file_list.sort(key=lambda x: x.change.date)
805
# Always sort by kind to get directories first
806
file_list.sort(key=lambda x: x.kind != 'directory')
809
for file in file_list:
816
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
818
def annotate_file(self, file_id, revid):
823
file_revid = self.get_inventory(revid)[file_id].revision
825
tree = self._branch.repository.revision_tree(file_revid)
828
for line_revid, text in tree.annotate_iter(file_id):
829
revid_set.add(line_revid)
830
if self._BADCHARS_RE.match(text):
831
# bail out; this isn't displayable text
832
yield util.Container(parity=0, lineno=1, status='same',
833
text='(This is a binary file.)',
834
change=util.Container())
836
change_cache = dict([(c.revid, c) \
837
for c in self.get_changes(list(revid_set))])
839
last_line_revid = None
840
for line_revid, text in tree.annotate_iter(file_id):
841
if line_revid == last_line_revid:
842
# remember which lines have a new revno and which don't
847
last_line_revid = line_revid
848
change = change_cache[line_revid]
849
trunc_revno = change.revno
850
if len(trunc_revno) > 10:
851
trunc_revno = trunc_revno[:9] + '...'
853
yield util.Container(parity=parity, lineno=lineno, status=status,
854
change=change, text=util.fixed_width(text))
857
self.log.debug('annotate: %r secs' % (time.time() - z,))