38
from StringIO import StringIO
40
from loggerhead import search
41
from loggerhead import util
42
from loggerhead.wholehistory import compute_whole_history_data
38
45
import bzrlib.branch
40
47
import bzrlib.errors
48
import bzrlib.progress
42
49
import bzrlib.revision
44
from loggerhead import search
45
from loggerhead import util
46
from loggerhead.wholehistory import compute_whole_history_data
53
# bzrlib's UIFactory is not thread-safe
54
uihack = threading.local()
56
class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack lives in thread-local
    storage (the module-level `uihack`), since bzrlib's UIFactory is not
    thread-safe."""

    def nested_progress_bar(self):
        # Lazily create one ProgressBarStack per thread; DummyProgress
        # keeps the bars silent.
        stack = getattr(uihack, '_progress_bar_stack', None)
        if stack is None:
            stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
            uihack._progress_bar_stack = stack
        return stack.get_nested()
62
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair buffered 'delete' and 'insert' lines into side-by-side rows.

    The shorter buffer is padded with empty ``(None, '', 'context')``
    entries so every row has both an old and a new side, then one
    ``util.Container`` per pair is appended to `line_list`.

    Side effect: both input buffers are emptied, matching the original
    pop-based implementation.
    """
    # Pad the shorter buffer so the two line up one-to-one.
    filler = (None, '', 'context')
    while len(delete_list) < len(insert_list):
        delete_list.append(filler)
    while len(insert_list) < len(delete_list):
        insert_list.append(filler)
    # zip instead of repeated list.pop(0), which made the pairing O(n^2).
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Preserve the observable "buffers are drained" behaviour.
    del delete_list[:]
    del insert_list[:]
78
def _make_side_by_side(chunk_list):
80
turn a normal unified-style diff (post-processed by parse_delta) into a
81
side-by-side diff structure. the new structure is::
89
type: str('context' or 'changed'),
94
for chunk in chunk_list:
97
delete_list, insert_list = [], []
98
for line in chunk.diff:
99
# Add <wbr/> every X characters so we can wrap properly
100
wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
101
wrap_lines = [util.html_clean(_line) for _line in wrap_line]
102
wrapped_line = wrap_char.join(wrap_lines)
104
if line.type == 'context':
105
if len(delete_list) or len(insert_list):
106
_process_side_by_side_buffers(line_list, delete_list,
108
delete_list, insert_list = [], []
109
line_list.append(util.Container(old_lineno=line.old_lineno,
110
new_lineno=line.new_lineno,
111
old_line=wrapped_line,
112
new_line=wrapped_line,
115
elif line.type == 'delete':
116
delete_list.append((line.old_lineno, wrapped_line, line.type))
117
elif line.type == 'insert':
118
insert_list.append((line.new_lineno, wrapped_line, line.type))
119
if len(delete_list) or len(insert_list):
120
_process_side_by_side_buffers(line_list, delete_list, insert_list)
121
out_chunk_list.append(util.Container(diff=line_list))
122
return out_chunk_list
49
125
def is_branch(folder):
100
178
def __getitem__(self, index):
    """Return the commit date of the index'd revision as a datetime.

    Looks up the revision id at `index` in ``self.revid_list`` and
    converts that revision's timestamp (seconds since the epoch) to a
    local ``datetime``.  Merge residue had left a duplicated one-line
    variant of this return statement; consolidated to a single one.
    """
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
105
182
def __len__(self):
    """Return the number of revisions in the underlying revid list."""
    return len(self.revid_list)
108
class FileChangeReporter(object):
110
def __init__(self, old_inv, new_inv):
115
self.text_changes = []
116
self.old_inv = old_inv
117
self.new_inv = new_inv
119
def revid(self, inv, file_id):
121
return inv[file_id].revision
122
except bzrlib.errors.NoSuchId:
125
def report(self, file_id, paths, versioned, renamed, modified,
127
if modified not in ('unchanged', 'kind changed'):
128
if versioned == 'removed':
129
filename = rich_filename(paths[0], kind[0])
131
filename = rich_filename(paths[1], kind[1])
132
self.text_changes.append(util.Container(
133
filename=filename, file_id=file_id,
134
old_revision=self.revid(self.old_inv, file_id),
135
new_revision=self.revid(self.new_inv, file_id)))
136
if versioned == 'added':
137
self.added.append(util.Container(
138
filename=rich_filename(paths[1], kind),
139
file_id=file_id, kind=kind[1]))
140
elif versioned == 'removed':
141
self.removed.append(util.Container(
142
filename=rich_filename(paths[0], kind),
143
file_id=file_id, kind=kind[0]))
145
self.renamed.append(util.Container(
146
old_filename=rich_filename(paths[0], kind[0]),
147
new_filename=rich_filename(paths[1], kind[1]),
149
text_modified=modified == 'modified'))
151
self.modified.append(util.Container(
152
filename=rich_filename(paths[1], kind),
156
class RevInfoMemoryCache(object):
157
"""A store that validates values against the revids they were stored with.
159
We use a unique key for each branch.
161
The reason for not just using the revid as the key is so that when a new
162
value is provided for a branch, we replace the old value used for the
165
There is another implementation of the same interface in
166
loggerhead.changecache.RevInfoDiskCache.
169
def __init__(self, cache):
171
# lru_cache is not thread-safe, so we need to lock all accesses.
172
# It is even modified when doing a get() on it.
173
self._lock = threading.RLock()
175
def get(self, key, revid):
176
"""Return the data associated with `key`, subject to a revid check.
178
If a value was stored under `key`, with the same revid, return it.
179
Otherwise return None.
183
cached = self._cache.get(key)
188
stored_revid, data = cached
189
if revid == stored_revid:
194
def set(self, key, revid, data):
195
"""Store `data` under `key`, to be checked against `revid` on get().
199
self._cache[key] = (revid, data)
203
# Used to store locks that prevent multiple threads from building a
204
# revision graph for the same branch at the same time, because that can
205
# cause severe performance issues that are so bad that the system seems
207
revision_graph_locks = {}
208
revision_graph_check_lock = threading.Lock()
210
class History(object):
186
class History (object):
211
187
"""Decorate a branch to provide information for rendering.
213
189
History objects are expected to be short lived -- when serving a request
215
191
around it, serve the request, throw the History object away, unlock the
216
192
branch and throw it away.
218
:ivar _file_change_cache: An object that caches information about the
219
files that changed between two revisions.
220
:ivar _rev_info: A list of information about revisions. This is by far
221
the most cryptic data structure in loggerhead. At the top level, it
222
is a list of 3-tuples [(merge-info, where-merged, parents)].
223
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
224
like a merged sorted list, but the revno is stringified.
225
`where-merged` is a tuple of revisions that have this revision as a
226
non-lefthand parent. Finally, `parents` is just the usual list of
227
parents of this revision.
228
:ivar _rev_indices: A dictionary mapping each revision id to the index of
229
the information about it in _rev_info.
230
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
194
:ivar _file_change_cache: xx
234
def _load_whole_history_data(self, caches, cache_key):
235
"""Set the attributes relating to the whole history of the branch.
237
:param caches: a list of caches with interfaces like
238
`RevInfoMemoryCache` and be ordered from fastest to slowest.
239
:param cache_key: the key to use with the caches.
241
self._rev_indices = None
242
self._rev_info = None
245
def update_missed_caches():
246
for cache in missed_caches:
247
cache.set(cache_key, self.last_revid, self._rev_info)
249
# Theoretically, it's possible for two threads to race in creating
250
# the Lock() object for their branch, so we put a lock around
251
# creating the per-branch Lock().
252
revision_graph_check_lock.acquire()
254
if cache_key not in revision_graph_locks:
255
revision_graph_locks[cache_key] = threading.Lock()
257
revision_graph_check_lock.release()
259
revision_graph_locks[cache_key].acquire()
262
data = cache.get(cache_key, self.last_revid)
264
self._rev_info = data
265
update_missed_caches()
268
missed_caches.append(cache)
270
whole_history_data = compute_whole_history_data(self._branch)
271
self._rev_info, self._rev_indices = whole_history_data
272
update_missed_caches()
274
revision_graph_locks[cache_key].release()
276
if self._rev_indices is not None:
277
self._revno_revid = {}
278
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
279
self._revno_revid[revno_str] = revid
281
self._revno_revid = {}
282
self._rev_indices = {}
283
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
284
self._rev_indices[revid] = seq
285
self._revno_revid[revno_str] = revid
287
def __init__(self, branch, whole_history_data_cache, file_cache=None,
288
revinfo_disk_cache=None, cache_key=None):
197
def __init__(self, branch, whole_history_data_cache):
289
198
assert branch.is_locked(), (
290
199
"Can only construct a History object with a read-locked branch.")
291
if file_cache is not None:
292
self._file_change_cache = file_cache
293
file_cache.history = self
295
self._file_change_cache = None
200
self._file_change_cache = None
296
201
self._branch = branch
297
self._branch_tags = None
298
202
self._inventory_cache = {}
299
self._branch_nick = self._branch.get_config().get_nickname()
300
self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))
203
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
302
205
self.last_revid = branch.last_revision()
304
caches = [RevInfoMemoryCache(whole_history_data_cache)]
305
if revinfo_disk_cache:
306
caches.append(revinfo_disk_cache)
307
self._load_whole_history_data(caches, cache_key)
207
whole_history_data = whole_history_data_cache.get(self.last_revid)
208
if whole_history_data is None:
209
whole_history_data = compute_whole_history_data(branch)
210
whole_history_data_cache[self.last_revid] = whole_history_data
212
(self._revision_graph, self._full_history, self._revision_info,
213
self._revno_revid, self._merge_sort, self._where_merged
214
) = whole_history_data
217
def use_file_cache(self, cache):
218
self._file_change_cache = cache
310
221
def has_revisions(self):
353
261
def get_short_revision_history_by_fileid(self, file_id):
354
262
# FIXME: would be awesome if we could get, for a folder, the list of
355
263
# revisions where items within that folder changed.i
356
possible_keys = [(file_id, revid) for revid in self._rev_indices]
357
get_parent_map = self._branch.repository.texts.get_parent_map
358
# We chunk the requests as this works better with GraphIndex.
359
# See _filter_revisions_touching_file_id in bzrlib/log.py
360
# for more information.
363
for start in xrange(0, len(possible_keys), chunk_size):
364
next_keys = possible_keys[start:start + chunk_size]
365
revids += [k[1] for k in get_parent_map(next_keys)]
366
del possible_keys, next_keys
265
# FIXME: Workaround for bzr versions prior to 1.6b3.
266
# Remove me eventually pretty please :)
267
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
268
w_revids = w.versions()
269
revids = [r for r in self._full_history if r in w_revids]
270
except AttributeError:
271
possible_keys = [(file_id, revid) for revid in self._full_history]
272
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
273
revids = [revid for _, revid in existing_keys.iterkeys()]
369
276
def get_revision_history_since(self, revid_list, date):
370
277
# if a user asks for revisions starting at 01-sep, they mean inclusive,
371
278
# so start at midnight on 02-sep.
372
279
date = date + datetime.timedelta(days=1)
373
# our revid list is sorted in REVERSE date order,
374
# so go thru some hoops here...
280
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
375
281
revid_list.reverse()
376
index = bisect.bisect(_RevListToTimestamps(revid_list,
377
self._branch.repository),
282
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
381
285
revid_list.reverse()
397
300
# all the relevant changes (time-consuming) only to return a list of
398
301
# revids which will be used to fetch a set of changes again.
400
# if they entered a revid, just jump straight there;
401
# ignore the passed-in revid_list
303
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
402
304
revid = self.fix_revid(query)
403
305
if revid is not None:
404
306
if isinstance(revid, unicode):
405
307
revid = revid.encode('utf-8')
406
changes = self.get_changes([revid])
308
changes = self.get_changes([ revid ])
407
309
if (changes is not None) and (len(changes) > 0):
411
313
m = self.us_date_re.match(query)
412
314
if m is not None:
413
date = datetime.datetime(util.fix_year(int(m.group(3))),
315
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
417
317
m = self.earth_date_re.match(query)
418
318
if m is not None:
419
date = datetime.datetime(util.fix_year(int(m.group(3))),
319
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
423
321
m = self.iso_date_re.match(query)
424
322
if m is not None:
425
date = datetime.datetime(util.fix_year(int(m.group(1))),
323
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
428
324
if date is not None:
429
325
if revid_list is None:
430
# if no limit to the query was given,
431
# search only the direct-parent path.
326
# if no limit to the query was given, search only the direct-parent path.
432
327
revid_list = list(self.get_revids_from(None, self.last_revid))
433
328
return self.get_revision_history_since(revid_list, date)
583
478
revnol = revno.split(".")
584
479
revnos = ".".join(revnol[:-2])
585
480
revnolast = int(revnol[-1])
481
if d.has_key(revnos):
588
483
if revnolast < m:
589
d[revnos] = (revnolast, revid)
484
d[revnos] = ( revnolast, revid )
591
d[revnos] = (revnolast, revid)
593
return [revid for (_, revid) in d.itervalues()]
595
def add_branch_nicks(self, change):
486
d[revnos] = ( revnolast, revid )
488
return [ d[revnos][1] for revnos in d.keys() ]
490
def get_branch_nicks(self, changes):
597
given a 'change', fill in the branch nicks on all parents and merge
492
given a list of changes from L{get_changes}, fill in the branch nicks
493
on all parents and merge points.
600
495
fetch_set = set()
601
for p in change.parents:
602
fetch_set.add(p.revid)
603
for p in change.merge_points:
604
fetch_set.add(p.revid)
496
for change in changes:
497
for p in change.parents:
498
fetch_set.add(p.revid)
499
for p in change.merge_points:
500
fetch_set.add(p.revid)
605
501
p_changes = self.get_changes(list(fetch_set))
606
502
p_change_dict = dict([(c.revid, c) for c in p_changes])
607
for p in change.parents:
608
if p.revid in p_change_dict:
609
p.branch_nick = p_change_dict[p.revid].branch_nick
611
p.branch_nick = '(missing)'
612
for p in change.merge_points:
613
if p.revid in p_change_dict:
614
p.branch_nick = p_change_dict[p.revid].branch_nick
616
p.branch_nick = '(missing)'
503
for change in changes:
504
# arch-converted branches may not have merged branch info :(
505
for p in change.parents:
506
if p.revid in p_change_dict:
507
p.branch_nick = p_change_dict[p.revid].branch_nick
509
p.branch_nick = '(missing)'
510
for p in change.merge_points:
511
if p.revid in p_change_dict:
512
p.branch_nick = p_change_dict[p.revid].branch_nick
514
p.branch_nick = '(missing)'
618
516
def get_changes(self, revid_list):
619
517
"""Return a list of changes objects for the given revids.
659
553
return [self._change_from_revision(rev) for rev in rev_list]
555
def _get_deltas_for_revisions_with_trees(self, revisions):
556
"""Produce a list of revision deltas.
558
Note that the input is a sequence of REVISIONS, not revision_ids.
559
Trees will be held in memory until the generator exits.
560
Each delta is relative to the revision's lefthand predecessor.
561
(This is copied from bzrlib.)
563
required_trees = set()
564
for revision in revisions:
565
required_trees.add(revision.revid)
566
required_trees.update([p.revid for p in revision.parents[:1]])
567
trees = dict((t.get_revision_id(), t) for
568
t in self._branch.repository.revision_trees(required_trees))
570
for revision in revisions:
571
if not revision.parents:
572
old_tree = self._branch.repository.revision_tree(
573
bzrlib.revision.NULL_REVISION)
575
old_tree = trees[revision.parents[0].revid]
576
tree = trees[revision.revid]
577
ret.append(tree.changes_from(old_tree))
661
580
def _change_from_revision(self, revision):
663
582
Given a bzrlib Revision, return a processed "change" for use in
585
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
587
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
666
589
message, short_message = clean_message(revision.message)
668
if self._branch_tags is None:
669
self._branch_tags = self._branch.tags.get_reverse_tag_dict()
672
if revision.revision_id in self._branch_tags:
673
revtags = ', '.join(self._branch_tags[revision.revision_id])
676
592
'revid': revision.revision_id,
677
'date': datetime.datetime.fromtimestamp(revision.timestamp),
678
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
679
'authors': revision.get_apparent_authors(),
594
'author': revision.get_apparent_author(),
680
595
'branch_nick': revision.properties.get('branch-nick', None),
681
596
'short_comment': short_message,
682
597
'comment': revision.message,
683
598
'comment_clean': [util.html_clean(s) for s in message],
684
599
'parents': revision.parent_ids,
685
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
688
if isinstance(revision, bzrlib.foreign.ForeignRevision):
689
foreign_revid, mapping = (rev.foreign_revid, rev.mapping)
690
elif ":" in revision.revision_id:
692
foreign_revid, mapping = \
693
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
694
revision.revision_id)
695
except bzrlib.errors.InvalidRevisionId:
700
if foreign_revid is not None:
701
entry["foreign_vcs"] = mapping.vcs.abbreviation
702
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
703
601
return util.Container(entry)
705
def get_file_changes_uncached(self, entry):
707
old_revid = entry.parents[0].revid
709
old_revid = bzrlib.revision.NULL_REVISION
710
return self.file_changes_for_revision_ids(old_revid, entry.revid)
712
def get_file_changes(self, entry):
603
def get_file_changes_uncached(self, entries):
604
delta_list = self._get_deltas_for_revisions_with_trees(entries)
606
return [self.parse_delta(delta) for delta in delta_list]
608
def get_file_changes(self, entries):
713
609
if self._file_change_cache is None:
714
return self.get_file_changes_uncached(entry)
610
return self.get_file_changes_uncached(entries)
716
return self._file_change_cache.get_file_changes(entry)
718
def add_changes(self, entry):
719
changes = self.get_file_changes(entry)
720
entry.changes = changes
612
return self._file_change_cache.get_file_changes(entries)
614
def add_changes(self, entries):
615
changes_list = self.get_file_changes(entries)
617
for entry, changes in zip(entries, changes_list):
618
entry.changes = changes
620
def get_change_with_diff(self, revid, compare_revid=None):
621
change = self.get_changes([revid])[0]
623
if compare_revid is None:
625
compare_revid = change.parents[0].revid
627
compare_revid = 'null:'
629
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
630
rev_tree2 = self._branch.repository.revision_tree(revid)
631
delta = rev_tree2.changes_from(rev_tree1)
633
change.changes = self.parse_delta(delta)
634
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
722
638
def get_file(self, file_id, revid):
723
"""Returns (path, filename, file contents)"""
639
"returns (path, filename, data)"
724
640
inv = self.get_inventory(revid)
725
641
inv_entry = inv[file_id]
726
642
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
729
645
path = '/' + path
730
646
return path, inv_entry.name, rev_tree.get_file_text(file_id)
732
def file_changes_for_revision_ids(self, old_revid, new_revid):
648
def _parse_diffs(self, old_tree, new_tree, delta):
650
Return a list of processed diffs, in the format::
659
type: str('context', 'delete', or 'insert'),
668
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
670
process.append((old_path, new_path, fid, kind))
671
for path, fid, kind, text_modified, meta_modified in delta.modified:
672
process.append((path, path, fid, kind))
674
for old_path, new_path, fid, kind in process:
675
old_lines = old_tree.get_file_lines(fid)
676
new_lines = new_tree.get_file_lines(fid)
678
if old_lines != new_lines:
680
bzrlib.diff.internal_diff(old_path, old_lines,
681
new_path, new_lines, buffer)
682
except bzrlib.errors.BinaryFile:
685
diff = buffer.getvalue()
688
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
692
def _process_diff(self, diff):
693
# doesn't really need to be a method; could be static.
696
for line in diff.splitlines():
699
if line.startswith('+++ ') or line.startswith('--- '):
701
if line.startswith('@@ '):
703
if chunk is not None:
705
chunk = util.Container()
707
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
708
old_lineno = lines[0]
709
new_lineno = lines[1]
710
elif line.startswith(' '):
711
chunk.diff.append(util.Container(old_lineno=old_lineno,
712
new_lineno=new_lineno,
717
elif line.startswith('+'):
718
chunk.diff.append(util.Container(old_lineno=None,
719
new_lineno=new_lineno,
720
type='insert', line=line[1:]))
722
elif line.startswith('-'):
723
chunk.diff.append(util.Container(old_lineno=old_lineno,
725
type='delete', line=line[1:]))
728
chunk.diff.append(util.Container(old_lineno=None,
732
if chunk is not None:
736
def parse_delta(self, delta):
734
738
Return a nested data structure containing the changes in a delta::
743
text_changes: list((filename, file_id)),
745
repo = self._branch.repository
746
if (bzrlib.revision.is_null(old_revid) or
747
bzrlib.revision.is_null(new_revid)):
748
old_tree, new_tree = map(
749
repo.revision_tree, [old_revid, new_revid])
751
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
753
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
755
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
757
return util.Container(
758
added=sorted(reporter.added, key=lambda x: x.filename),
759
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
760
removed=sorted(reporter.removed, key=lambda x: x.filename),
761
modified=sorted(reporter.modified, key=lambda x: x.filename),
762
text_changes=sorted(reporter.text_changes, key=lambda x: x.filename))
753
for path, fid, kind in delta.added:
754
added.append((rich_filename(path, kind), fid))
756
for path, fid, kind, text_modified, meta_modified in delta.modified:
757
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
759
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
760
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
761
if meta_modified or text_modified:
762
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
764
for path, fid, kind in delta.removed:
765
removed.append((rich_filename(path, kind), fid))
767
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
770
def add_side_by_side(changes):
    """Attach side-by-side diff chunks to every modified file in `changes`.

    Each modified entry gets an ``sbs_chunks`` attribute computed from its
    unified-diff ``chunks``.
    """
    # FIXME: this is a rotten API.
    for change in changes:
        for modified_file in change.changes.modified:
            modified_file.sbs_chunks = _make_side_by_side(modified_file.chunks)
776
def get_filelist(self, inv, file_id, sort_type=None):
778
return the list of all files (and their attributes) within a given
782
dir_ie = inv[file_id]
783
path = inv.id2path(file_id)
788
for filename, entry in dir_ie.children.iteritems():
789
revid_set.add(entry.revision)
792
for change in self.get_changes(list(revid_set)):
793
change_dict[change.revid] = change
795
for filename, entry in dir_ie.children.iteritems():
797
if entry.kind == 'directory':
800
revid = entry.revision
802
file = util.Container(
803
filename=filename, executable=entry.executable, kind=entry.kind,
804
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
805
revid=revid, change=change_dict[revid])
806
file_list.append(file)
808
if sort_type == 'filename' or sort_type is None:
809
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
810
elif sort_type == 'size':
811
file_list.sort(key=lambda x: x.size)
812
elif sort_type == 'date':
813
file_list.sort(key=lambda x: x.change.date)
815
# Always sort by kind to get directories first
816
file_list.sort(key=lambda x: x.kind != 'directory')
819
for file in file_list:
826
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
828
def annotate_file(self, file_id, revid):
833
file_revid = self.get_inventory(revid)[file_id].revision
835
tree = self._branch.repository.revision_tree(file_revid)
838
for line_revid, text in tree.annotate_iter(file_id):
839
revid_set.add(line_revid)
840
if self._BADCHARS_RE.match(text):
841
# bail out; this isn't displayable text
842
yield util.Container(parity=0, lineno=1, status='same',
843
text='(This is a binary file.)',
844
change=util.Container())
846
change_cache = dict([(c.revid, c) \
847
for c in self.get_changes(list(revid_set))])
849
last_line_revid = None
850
for line_revid, text in tree.annotate_iter(file_id):
851
if line_revid == last_line_revid:
852
# remember which lines have a new revno and which don't
857
last_line_revid = line_revid
858
change = change_cache[line_revid]
859
trunc_revno = change.revno
860
if len(trunc_revno) > 10:
861
trunc_revno = trunc_revno[:9] + '...'
863
yield util.Container(parity=parity, lineno=lineno, status=status,
864
change=change, text=util.fixed_width(text))
867
self.log.debug('annotate: %r secs' % (time.time() - z,))