    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        wrap_char = '<wbr/>'
        delete_list, insert_list = [], []
        for line in chunk.diff:
            # Add <wbr/> every 78 characters so we can wrap properly
            wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
            wrap_lines = [util.html_clean(_line) for _line in wrap_line]
            wrapped_line = wrap_char.join(wrap_lines)

            if line.type == 'context':
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(old_lineno=line.old_lineno,
                                                new_lineno=line.new_lineno,
                                                old_line=wrapped_line,
                                                new_line=wrapped_line,
                                                old_type=line.type,
                                                new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, wrapped_line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, wrapped_line, line.type))
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
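# Hedged usage sketch (hypothetical data; assumes util.Container is the plain
# attribute bag used throughout this module, and that the loop above is the
# body of the module's side-by-side helper, here called _make_side_by_side):
# a chunk with one replaced line yields a row pairing the delete with the
# insert.
#
#   chunk = util.Container(diff=[
#       util.Container(old_lineno=1, new_lineno=1, type='context', line='a'),
#       util.Container(old_lineno=2, new_lineno=None, type='delete', line='b'),
#       util.Container(old_lineno=None, new_lineno=2, type='insert', line='c'),
#   ])
#   sbs_chunks = _make_side_by_side([chunk])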
    def __getitem__(self, index):
        """Get the date of the index'd item."""
        return datetime.datetime.fromtimestamp(self.repository.get_revision(
            self.revid_list[index]).timestamp)

    def __len__(self):
        return len(self.revid_list)
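# Illustrative note: this wrapper exists so bisect can binary-search a revid
# list by commit date without computing every timestamp up front; __getitem__
# fetches timestamps lazily, one revision per probe. A hedged sketch
# (hypothetical names):
#
#   wrapper = _RevListToTimestamps(oldest_first_revids, repository)
#   index = bisect.bisect(wrapper, cutoff_date)  # revisions at/before cutoff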
class History(object):
    """Decorate a branch to provide information for rendering.

    History objects are expected to be short lived -- when serving a request
    for a particular branch, open it, read-lock it, wrap a History object
    around it, serve the request, throw the History object away, unlock the
    branch and throw it away.

    :ivar _file_change_cache: xx
    """
    def __init__(self, branch, whole_history_data_cache):
        assert branch.is_locked(), (
            "Can only construct a History object with a read-locked branch.")
        self._change_cache = None
        self._file_change_cache = None
        self._lock = threading.RLock()
        self._branch = branch
        self._inventory_cache = {}
        self._branch_nick = self._branch.get_config().get_nickname()
        self.log = logging.getLogger('loggerhead.%s' % self._branch_nick)

        self.last_revid = branch.last_revision()

        whole_history_data = whole_history_data_cache.get(self.last_revid)
        if whole_history_data is None:
            whole_history_data = compute_whole_history_data(branch)
            whole_history_data_cache[self.last_revid] = whole_history_data

        (self._revision_graph, self._full_history, self._revision_info,
         self._revno_revid, self._merge_sort, self._where_merged,
         ) = whole_history_data
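    # Hedged construction sketch (illustrative, not part of the module): any
    # dict-like mapping works as whole_history_data_cache, and the branch must
    # stay read-locked for the History object's lifetime:
    #
    #   cache = {}
    #   branch.lock_read()
    #   try:
    #       history = History(branch, cache)
    #       ...  # serve the request
    #   finally:
    #       branch.unlock()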
    @classmethod
    def from_branch(cls, branch, name=None):
        z = time.time()
        self = cls()
        self._branch = branch
        self._last_revid = self._branch.last_revision()
        if self._last_revid is not None:
            self._revision_graph = branch.repository.get_revision_graph(
                self._last_revid)
        else:
            self._revision_graph = {}

        if name is None:
            name = self._branch.nick
        self.log = logging.getLogger('loggerhead.%s' % (name,))

        self._full_history = []
        self._revision_info = {}
        self._revno_revid = {}
        self._merge_sort = bzrlib.tsort.merge_sort(
            self._revision_graph, self._last_revid, generate_revno=True)
        for (seq, revid, merge_depth, revno, end_of_merge) in self._merge_sort:
            self._full_history.append(revid)
            revno_str = '.'.join(str(n) for n in revno)
            self._revno_revid[revno_str] = revid
            self._revision_info[revid] = (
                seq, revid, merge_depth, revno_str, end_of_merge)

        # cache which mainline revisions merged each revision
        self._where_merged = {}
        for revid in self._revision_graph.keys():
            if revid not in self._full_history:
                continue
            for parent in self._revision_graph[revid]:
                self._where_merged.setdefault(parent, set()).add(revid)

        self.log.info('built revision graph cache: %r secs' % (time.time() - z,))
        return self
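    # Illustrative shapes of the indexes built above (revids hypothetical):
    #
    #   _revno_revid   = {'1': 'rev-a', '1.1.1': 'rev-b', '2': 'rev-c'}
    #   _revision_info = {'rev-b': (seq, 'rev-b', merge_depth, '1.1.1',
    #                               end_of_merge)}
    #   _where_merged  = {'rev-b': set(['rev-c'])}  # mainline rev-c merged rev-b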
    @classmethod
    def from_folder(cls, path, name=None):
        b = bzrlib.branch.Branch.open(path)
        return cls.from_branch(b, name)
    def out_of_date(self):
        # the branch may have been upgraded on disk, in which case we're stale.
        if self._branch.__class__ is not \
                bzrlib.branch.Branch.open(self._branch.base).__class__:
            return True
        return self._branch.last_revision() != self._last_revid
    def use_cache(self, cache):
        self._change_cache = cache

    def use_file_cache(self, cache):
        self._file_change_cache = cache
    def has_revisions(self):
        return not bzrlib.revision.is_null(self.last_revid)
    def use_search_index(self, index):
        self._index = index

    def detach(self):
        # called when a new history object needs to be created, because the
        # branch history has changed. we need to immediately close and stop
        # using our caches, because a new history object will be created to
        # replace us, using the same cache files.
        # (may also be called during server shutdown.)
        if self._change_cache is not None:
            self._change_cache.close()
            self._change_cache = None
        if self._index is not None:
            self._index.close()
            self._index = None
    def flush_cache(self):
        if self._change_cache is None:
            return
        self._change_cache.flush()

    def check_rebuild(self):
        if self._change_cache is not None:
            self._change_cache.check_rebuild()
        if self._index is not None:
            self._index.check_rebuild()

    last_revid = property(lambda self: self._last_revid, None, None)
    def get_config(self):
        return self._branch.get_config()
    def get_revno(self, revid):
        if revid not in self._revision_info:
            # ghost parent?
            return 'unknown'
        (seq, revid, merge_depth,
         revno_str, end_of_merge) = self._revision_info[revid]
        return revno_str
    def get_revision_history(self):
        return self._full_history

    def get_revids_from(self, revid_list, start_revid):
        """
        Yield the mainline (wrt start_revid) revisions that merged each
        revid in revid_list; the list can be None if you just want to travel
        across all revisions.
        """
        if revid_list is None:
            revid_list = self._full_history
        revid_set = set(revid_list)
        revid = start_revid

        def introduced_revisions(revid):
            r = set([revid])
            seq, revid, md, revno, end_of_merge = self._revision_info[revid]
            i = seq + 1
            while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
                r.add(self._merge_sort[i][1])
                i += 1
            return r

        while True:
            if bzrlib.revision.is_null(revid):
                return
            if introduced_revisions(revid) & revid_set:
                yield revid
            if not self._revision_graph.has_key(revid):
                return
            parents = self._revision_graph[revid]
            if len(parents) == 0:
                return
            revid = parents[0]
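    # Hedged usage sketch: take the ten most recent mainline revisions that
    # introduced anything in revid_list (itertools is stdlib; `history` is a
    # constructed History object).
    #
    #   import itertools
    #   page = list(itertools.islice(
    #       history.get_revids_from(revid_list, history.last_revid), 10))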
    def get_short_revision_history_by_fileid(self, file_id):
        # wow. is this really the only way we can get this list? by
        # man-handling the weave store directly? :-0
        # FIXME: would be awesome if we could get, for a folder, the list of
        # revisions where items within that folder changed.
        try:
            # FIXME: Workaround for bzr versions prior to 1.6b3.
            # Remove me eventually pretty please :)
            w = self._branch.repository.weave_store.get_weave(
                file_id, self._branch.repository.get_transaction())
            w_revids = w.versions()
            revids = [r for r in self._full_history if r in w_revids]
        except AttributeError:
            possible_keys = [(file_id, revid) for revid in self._full_history]
            get_parent_map = self._branch.repository.texts.get_parent_map
            # We chunk the requests as this works better with GraphIndex.
            # See _filter_revisions_touching_file_id in bzrlib/log.py
            # for more information.
            revids = []
            chunk_size = 1000
            for start in xrange(0, len(possible_keys), chunk_size):
                next_keys = possible_keys[start:start + chunk_size]
                revids += [k[1] for k in get_parent_map(next_keys)]
            del possible_keys, next_keys
        return revids
    def get_revision_history_since(self, revid_list, date):
        # if a user asks for revisions starting at 01-sep, they mean inclusive,
        # so start at midnight on 02-sep.
        date = date + datetime.timedelta(days=1)
        # our revid list is sorted in REVERSE date order,
        # so go thru some hoops here...
        revid_list.reverse()
        index = bisect.bisect(_RevListToTimestamps(revid_list,
                                                   self._branch.repository),
                              date)
        if index == 0:
            return []
        revid_list.reverse()
        index = -index
        return revid_list[index:]
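    # Worked example of the double reverse above: with newest-first input
    # [mar, feb, jan] and date=01-feb, the list is flipped oldest-first for
    # bisect, which returns 2 (jan and feb commit before midnight 02-feb);
    # after flipping back, [mar, feb, jan][-2:] == [feb, jan] -- the history
    # as of that date, newest first.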
    def get_revision_history_matching(self, revid_list, text):
        self.log.debug('searching %d revisions for %r', len(revid_list), text)
        z = time.time()
        # this is going to be painfully slow. :(
        text = text.lower()
        out = []
        for revid in revid_list:
            change = self.get_changes([revid])[0]
            if text in change.comment.lower():
                out.append(revid)
        self.log.debug('searched %d revisions for %r in %r secs',
                       len(revid_list), text, time.time() - z)
        return out
    def get_revision_history_matching_indexed(self, revid_list, text):
        self.log.debug('searching %d revisions for %r', len(revid_list), text)
        z = time.time()
        if self._index is None:
            return self.get_revision_history_matching(revid_list, text)
        out = self._index.find(text, revid_list)
        self.log.debug('searched %d revisions for %r in %r secs: %d results',
                       len(revid_list), text, time.time() - z, len(out))
        # put them in some coherent order :)
        out = [r for r in self._full_history if r in out]
        return out
    def get_search_revid_list(self, query, revid_list):
        """
        given a "quick-search" query, try a few obvious possible meanings:

            - revision id or # ("128.1.3")
            - date (US style "mm/dd/yy", earth style "dd-mm-yy", or
              iso style "yyyy-mm-dd")
            - comment text as a fallback

        and return a revid list that matches.
        """
        # FIXME: there is some silliness in this action. we have to look up
        # all the relevant changes (time-consuming) only to return a list of
        # revids which will be used to fetch a set of changes again.

        # if they entered a revid, just jump straight there;
        # ignore the passed-in revid_list
        revid = self.fix_revid(query)
        if revid is not None:
            if isinstance(revid, unicode):
                revid = revid.encode('utf-8')
            changes = self.get_changes([revid])
            if (changes is not None) and (len(changes) > 0):
                return [revid]

        date = None
        m = self.us_date_re.match(query)
        if m is not None:
            date = datetime.datetime(util.fix_year(int(m.group(3))),
                                     int(m.group(1)), int(m.group(2)))
        else:
            m = self.earth_date_re.match(query)
            if m is not None:
                date = datetime.datetime(util.fix_year(int(m.group(3))),
                                         int(m.group(2)), int(m.group(1)))
            else:
                m = self.iso_date_re.match(query)
                if m is not None:
                    date = datetime.datetime(util.fix_year(int(m.group(1))),
                                             int(m.group(2)), int(m.group(3)))
        if date is not None:
            if revid_list is None:
                # if no limit to the query was given,
                # search only the direct-parent path.
                revid_list = list(self.get_revids_from(None, self.last_revid))
            return self.get_revision_history_since(revid_list, date)

        # check comment fields.
        if revid_list is None:
            revid_list = self._full_history
        return self.get_revision_history_matching_indexed(revid_list, query)

    revno_re = re.compile(r'^[\d\.]+$')
    # the date regex are without a final '$' so that queries like
    # "2006-11-30 12:15" still mostly work. (i think it's better to give
    # the wrong answer than a "can't parse" error.)
        revlist = list(self.get_revids_from(None, revid))
    def get_view(self, revid, start_revid, file_id, query=None):
        """
        use the URL parameters (revid, start_revid, file_id, and query) to
        determine the revision list we're viewing (start_revid, file_id, query)
        and where we are in it (revid).

            - if a query is given, we're viewing query results.
            - if a file_id is given, we're viewing revisions for a specific
              file.
            - if a start_revid is given, we're viewing the branch from a
              specific revision up the tree.

        these may be combined to view revisions for a specific file, from
        a specific revision, with a specific search query.

        returns a new (revid, start_revid, revid_list) where:

            - revid: current position within the view
            - start_revid: starting revision of this view
            - revid_list: list of revision ids for this view
        """
        if file_id is not None:
            revid_list = self.get_file_view(start_revid, file_id)
        else:
            revid_list = None
        revid_list = search.search_revisions(self._branch, query)
        if revid_list and len(revid_list) > 0:
            if revid not in revid_list:
                revid = revid_list[0]
            return revid, start_revid, revid_list
        else:
            # XXX: This should return a message saying that the search could
            # not be completed due to either missing the plugin or missing a
            # search index.
            return None, None, []
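    # Hedged usage sketch: callers unpack the triple and treat (None, None, [])
    # as "search unavailable" (see the XXX comment above).
    #
    #   revid, start_revid, revid_list = history.get_view(
    #       revid, start_revid, file_id, query='fix crash')
    #   if revid is None:
    #       pass  # render a "search could not be completed" page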
    def get_inventory(self, revid):
        if revid not in self._inventory_cache:
            self._inventory_cache[revid] = (
                self._branch.repository.get_revision_inventory(revid))
        return self._inventory_cache[revid]
    def get_path(self, revid, file_id):
        if (file_id is None) or (file_id == ''):
            return ''
        path = self.get_inventory(revid).id2path(file_id)
        if (len(path) > 0) and not path.startswith('/'):
            path = '/' + path
        return path
    def get_file_id(self, revid, path):
        if (len(path) > 0) and not path.startswith('/'):
            path = '/' + path
        return self.get_inventory(revid).path2id(path)
    def get_merge_point_list(self, revid):

                p.branch_nick = '(missing)'
    def get_changes(self, revid_list):
        """Return a list of changes objects for the given revids.

        Revisions not present and NULL_REVISION will be ignored.
        """
        if self._change_cache is None:
            changes = self.get_changes_uncached(revid_list)
        else:
            changes = self._change_cache.get_changes(revid_list)
        if len(changes) == 0:
            return changes

        # some data needs to be recalculated each time, because it may
        # change as new revisions are added.
        for change in changes:
            merge_revids = self.simplify_merge_point_list(
                self.get_merge_point_list(change.revid))
            change.merge_points = [util.Container(revid=r,
                                                  revno=self.get_revno(r))
                                   for r in merge_revids]
            if len(change.parents) > 0:
                change.parents = [util.Container(revid=r,
                                                 revno=self.get_revno(r))
                                  for r in change.parents]
            change.revno = self.get_revno(change.revid)
        return changes
    def get_changes_uncached(self, revid_list):
        # FIXME: deprecated method in getting a null revision
        revid_list = filter(lambda revid: not bzrlib.revision.is_null(revid),
                            revid_list)
        parent_map = self._branch.repository.get_graph().get_parent_map(
            revid_list)
        # We need to return the answer in the same order as the input,
        # less any ghosts.
        present_revids = [revid for revid in revid_list
                          if revid in parent_map]
        rev_list = self._branch.repository.get_revisions(present_revids)

        return [self._change_from_revision(rev) for rev in rev_list]
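    # Illustrative note: get_parent_map silently drops ghost revids, so
    # present_revids re-filters the *input* list to keep the caller's order;
    # iterating parent_map directly would return revisions in arbitrary order.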
    # alright, let's profile this sucka.
    def _get_changes_profiled(self, revid_list, get_diffs=False):
        from loggerhead.lsprof import profile
        import cPickle
        ret, stats = profile(self.get_changes_uncached, revid_list, get_diffs)
        stats.sort()
        stats.freeze()
        cPickle.dump(stats, open('lsprof.stats', 'w'), 2)
        self.log.info('lsprof complete!')
        return ret

    def _get_deltas_for_revisions_with_trees(self, entries):
        """Produce a list of revision deltas.

        Note that the input is a sequence of REVISIONS, not revision_ids.
        Trees will be held in memory until the generator exits.
        Each delta is relative to the revision's lefthand predecessor.
        (This is copied from bzrlib.)
        """
        required_trees = set()
        for entry in entries:
            required_trees.add(entry.revid)
            required_trees.update([p.revid for p in entry.parents[:1]])
        trees = dict((t.get_revision_id(), t) for
                     t in self._branch.repository.revision_trees(
                         required_trees))
        ret = []
        self._branch.repository.lock_read()
        try:
            for entry in entries:
                if not entry.parents:
                    old_tree = self._branch.repository.revision_tree(
                        bzrlib.revision.NULL_REVISION)
                else:
                    old_tree = trees[entry.parents[0].revid]
                tree = trees[entry.revid]
                ret.append(tree.changes_from(old_tree))
            return ret
        finally:
            self._branch.repository.unlock()
    def _change_from_revision(self, revision):
        """
        Given a bzrlib Revision, return a processed "change" for use in
        templates.
        """
        commit_time = datetime.datetime.fromtimestamp(revision.timestamp)

        parents = [util.Container(revid=r, revno=self.get_revno(r))
                   for r in revision.parent_ids]

        message, short_message = clean_message(revision.message)

        entry = {
            'revid': revision.revision_id,
            'date': commit_time,
            'author': revision.get_apparent_author(),
            'branch_nick': revision.properties.get('branch-nick', None),
            'short_comment': short_message,
            'comment': revision.message,
            'comment_clean': [util.html_clean(s) for s in message],
            'parents': revision.parent_ids,
        }
        return util.Container(entry)
    def get_changes_uncached(self, revid_list):
        # Because we may loop and call get_revisions multiple times (to throw
        # out dud revids), we grab a read lock.
        self._branch.lock_read()
        try:
            while True:
                try:
                    rev_list = self._branch.repository.get_revisions(revid_list)
                except (KeyError, bzrlib.errors.NoSuchRevision), e:
                    # this sometimes happens with arch-converted branches.
                    # i don't know why. :(
                    self.log.debug('No such revision (skipping): %s', e)
                    revid_list.remove(e.revision)
                else:
                    break
            return [self._change_from_revision(rev) for rev in rev_list]
        finally:
            self._branch.unlock()
    def get_file_changes_uncached(self, entries):
        delta_list = self._get_deltas_for_revisions_with_trees(entries)

        return [self.parse_delta(delta) for delta in delta_list]

    def get_file_changes(self, entries):
        if self._file_change_cache is None:
            return self.get_file_changes_uncached(entries)
        else:
            return self._file_change_cache.get_file_changes(entries)
            if line.startswith('@@'):
                # new chunk
                if chunk is not None:
                    chunks.append(chunk)
                chunk = util.Container()
                chunk.diff = []
                split_lines = line.split(' ')[1:3]
                lines = [int(x.split(',')[0][1:]) for x in split_lines]
                old_lineno = lines[0]
                new_lineno = lines[1]
            elif line.startswith(' '):
                chunk.diff.append(util.Container(old_lineno=old_lineno,
                                                 new_lineno=new_lineno,
                                                 type='context',
                                                 line=util.fixed_width(line[1:])))
                old_lineno += 1
                new_lineno += 1
            elif line.startswith('+'):
                chunk.diff.append(util.Container(old_lineno=None,
                                                 new_lineno=new_lineno,
                                                 type='insert',
                                                 line=util.fixed_width(line[1:])))
                new_lineno += 1
            elif line.startswith('-'):
                chunk.diff.append(util.Container(old_lineno=old_lineno,
                                                 new_lineno=None,
                                                 type='delete',
                                                 line=util.fixed_width(line[1:])))
                old_lineno += 1
            else:
                chunk.diff.append(util.Container(old_lineno=None,
                                                 new_lineno=None,
                                                 type='unknown',
                                                 line=util.fixed_width(repr(line))))
        if chunk is not None:
            chunks.append(chunk)
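        # Worked example of the hunk-header parse above: for '@@ -1,3 +2,4 @@',
        # line.split(' ')[1:3] is ['-1,3', '+2,4']; stripping the sign and the
        # ',count' suffix gives old_lineno=1, new_lineno=2.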
        for path, fid, kind in delta.added:
            added.append((rich_filename(path, kind), fid))

        for path, fid, kind, text_modified, meta_modified in delta.modified:
            modified.append(util.Container(filename=rich_filename(path, kind),
                                           file_id=fid))

        for old_path, new_path, fid, kind, text_modified, meta_modified in \
                delta.renamed:
            renamed.append((rich_filename(old_path, kind),
                            rich_filename(new_path, kind), fid))
            if meta_modified or text_modified:
                modified.append(util.Container(
                    filename=rich_filename(new_path, kind), file_id=fid))

        for path, fid, kind in delta.removed:
            removed.append((rich_filename(path, kind), fid))

        return util.Container(added=added, renamed=renamed,
                              removed=removed, modified=modified)
    @staticmethod
    def add_side_by_side(changes):
        for change in changes:
            for m in change.changes.modified:
                m.sbs_chunks = _make_side_by_side(m.chunks)
            pathname = filename
            if entry.kind == 'directory':
                pathname += '/'
            if path == '':
                absolutepath = pathname
            else:
                absolutepath = urllib.quote(path + '/' + pathname)
            revid = entry.revision

            file = util.Container(
                filename=filename, executable=entry.executable, kind=entry.kind,
                pathname=pathname, absolutepath=absolutepath,
                file_id=entry.file_id, size=entry.text_size, revid=revid,
                change=change_dict[revid])
            file_list.append(file)

        if sort_type == 'filename' or sort_type is None:
            file_list.sort(key=lambda x: x.filename.lower())  # case-insensitive
        elif sort_type == 'size':
            file_list.sort(key=lambda x: x.size)
        elif sort_type == 'date':
            file_list.sort(key=lambda x: x.change.date)

        # Always sort by kind to get directories first
        file_list.sort(key=lambda x: x.kind != 'directory')

        parity = 0
        for file in file_list:
            file.parity = parity
            parity ^= 1
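        # Illustrative note: parity alternates 0/1 down the sorted list so
        # templates can stripe row colors; the kind != 'directory' sort key
        # puts directories first because False sorts before True.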
    def annotate_file(self, file_id, revid):
        z = time.time()
        lineno = 1
        parity = 0

        file_revid = self.get_inventory(revid)[file_id].revision
        tree = self._branch.repository.revision_tree(file_revid)
        revid_set = set()

        try:
            bzrlib.textfile.check_text_lines(tree.get_file_lines(file_id))
        except bzrlib.errors.BinaryFile:
            # bail out; this isn't displayable text
            yield util.Container(parity=0, lineno=1, status='same',
                                 text='(This is a binary file.)',
                                 change=util.Container())
        else:
            for line_revid, text in tree.annotate_iter(file_id):
                revid_set.add(line_revid)

            change_cache = dict([(c.revid, c)
                                 for c in self.get_changes(list(revid_set))])

            last_line_revid = None
            for line_revid, text in tree.annotate_iter(file_id):
                if line_revid == last_line_revid:
                    # remember which lines have a new revno and which don't
                    status = 'same'
                else:
                    status = 'changed'
                    parity ^= 1
                    last_line_revid = line_revid
                    change = change_cache[line_revid]
                    trunc_revno = change.revno
                    if len(trunc_revno) > 10:
                        trunc_revno = trunc_revno[:9] + '...'

                yield util.Container(parity=parity, lineno=lineno, status=status,
                                     change=change, text=util.fixed_width(text))
                lineno += 1

        self.log.debug('annotate: %r secs' % (time.time() - z,))
    def get_bundle(self, revid, compare_revid=None):
        if compare_revid is None:
            parents = self._revision_graph[revid]
            if len(parents) > 0:
                compare_revid = parents[0]
        s = StringIO()
        bzrlib.bundle.serializer.write_bundle(self._branch.repository, revid,
                                              compare_revid, s)
        return s.getvalue()
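    # Hedged usage sketch: bundle one revision against its first parent (the
    # default) and save it; assumes StringIO is imported at module top, as the
    # code above requires.
    #
    #   data = history.get_bundle(revid)
    #   open('r1.bundle', 'wb').write(data)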