from StringIO import StringIO

import bzrlib.branch
import bzrlib.errors
import bzrlib.progress
import bzrlib.revision

from loggerhead import search
from loggerhead import util
from loggerhead.wholehistory import compute_whole_history_data

# bzrlib's UIFactory is not thread-safe
uihack = threading.local()


class ThreadSafeUIFactory(bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack lives in thread-local storage.

    bzrlib's default UIFactory keeps a single progress-bar stack, which is
    unsafe when loggerhead serves requests from multiple threads; each thread
    gets its own (dummy) stack here instead.
    """

    def nested_progress_bar(self):
        # Lazily create one DummyProgress stack per thread.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()

bzrlib.ui.ui_factory = ThreadSafeUIFactory()
63
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair buffered delete/insert lines into side-by-side rows.

    Pads the shorter buffer with blank 'context' entries so the two lists
    line up, then appends one Container per paired row to line_list.  Both
    buffers are consumed (emptied) by this call.

    :param line_list: output list of util.Container rows (mutated in place).
    :param delete_list: list of (old_lineno, line, type) tuples.
    :param insert_list: list of (new_lineno, line, type) tuples.
    """
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    while len(delete_list) > 0:
        d = delete_list.pop(0)
        i = insert_list.pop(0)
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
76
def _make_side_by_side(chunk_list):
    """
    turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  the new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    # NOTE(review): the initializations of out_chunk_list and line_list were
    # lost in the garbled source; restored here, as the loop below requires
    # them (NameError otherwise).
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                # A context row flushes any pending delete/insert pairs first.
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(
                    old_lineno=line.old_lineno, new_lineno=line.new_lineno,
                    old_line=line.line, new_line=line.line,
                    old_type=line.type, new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
49
113
def is_branch(folder):
100
166
def __getitem__(self, index):
    """Get the date of the index'd item.

    Looks up the revision id at `index` in self.revid_list and converts its
    commit timestamp to a local datetime (used by bisect over a
    reverse-date-sorted revid list).
    """
    # NOTE(review): the garbled source contained two interleaved versions of
    # this method; the wrapped (multi-line) variant is kept.
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
105
170
def __len__(self):
    """Return the number of revisions in the wrapped revid list."""
    return len(self.revid_list)
108
class FileChangeReporter(object):
    """Accumulate per-file change info from bzrlib's delta change reporting.

    Instances are driven as the reporter callback for
    bzrlib.delta.report_changes; afterwards the added/modified/renamed/
    removed/text_changes lists hold util.Container records for rendering.

    NOTE(review): this region of the source was garbled (interleaved diff
    numbers, dropped statements); the missing lines -- the accumulator
    initializations, the try/except in revid(), the report() signature tail
    and the elif/else branch keywords -- were restored to fit the surviving
    fragments.  Confirm against upstream loggerhead.
    """

    def __init__(self, old_inv, new_inv):
        # Accumulators read by callers once report() has been driven over a
        # whole tree delta.
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        """Return the revision that last modified file_id in inv.

        Falls back to the null revision when the file is absent from inv.
        """
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        """Callback invoked by bzrlib.delta.report_changes for each change."""
        if modified not in ('unchanged', 'kind changed'):
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id, kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind),
                file_id=file_id, kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                file_id=file_id,
                text_modified=modified == 'modified'))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id))
156
class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # NOTE(review): this assignment was dropped in the garbled source;
        # restored -- get()/set() below require self._cache.
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        cached = self._cache.get(key)
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get()."""
        self._cache[key] = (revid, data)
192
# Used to store locks that prevent multiple threads from building a
# revision graph for the same branch at the same time, because that can
# cause severe performance issues that are so bad that the system seems
# to hang.
revision_graph_locks = {}
revision_graph_check_lock = threading.Lock()
199
class History(object):
174
class History (object):
200
175
"""Decorate a branch to provide information for rendering.
202
177
History objects are expected to be short lived -- when serving a request
204
179
around it, serve the request, throw the History object away, unlock the
205
180
branch and throw it away.
207
:ivar _file_change_cache: An object that caches information about the
208
files that changed between two revisions.
209
:ivar _rev_info: A list of information about revisions. This is by far
210
the most cryptic data structure in loggerhead. At the top level, it
211
is a list of 3-tuples [(merge-info, where-merged, parents)].
212
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
213
like a merged sorted list, but the revno is stringified.
214
`where-merged` is a tuple of revisions that have this revision as a
215
non-lefthand parent. Finally, `parents` is just the usual list of
216
parents of this revision.
217
:ivar _rev_indices: A dictionary mapping each revision id to the index of
218
the information about it in _rev_info.
219
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
182
:ivar _file_change_cache: xx
223
def _load_whole_history_data(self, caches, cache_key):
224
"""Set the attributes relating to the whole history of the branch.
226
:param caches: a list of caches with interfaces like
227
`RevInfoMemoryCache` and be ordered from fastest to slowest.
228
:param cache_key: the key to use with the caches.
230
self._rev_indices = None
231
self._rev_info = None
234
def update_missed_caches():
235
for cache in missed_caches:
236
cache.set(cache_key, self.last_revid, self._rev_info)
238
# Theoretically, it's possible for two threads to race in creating
239
# the Lock() object for their branch, so we put a lock around
240
# creating the per-branch Lock().
241
revision_graph_check_lock.acquire()
243
if cache_key not in revision_graph_locks:
244
revision_graph_locks[cache_key] = threading.Lock()
246
revision_graph_check_lock.release()
248
revision_graph_locks[cache_key].acquire()
251
data = cache.get(cache_key, self.last_revid)
253
self._rev_info = data
254
update_missed_caches()
257
missed_caches.append(cache)
259
whole_history_data = compute_whole_history_data(self._branch)
260
self._rev_info, self._rev_indices = whole_history_data
261
update_missed_caches()
263
revision_graph_locks[cache_key].release()
265
if self._rev_indices is not None:
266
self._revno_revid = {}
267
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
268
self._revno_revid[revno_str] = revid
270
self._revno_revid = {}
271
self._rev_indices = {}
272
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
273
self._rev_indices[revid] = seq
274
self._revno_revid[revno_str] = revid
276
def __init__(self, branch, whole_history_data_cache, file_cache=None,
277
revinfo_disk_cache=None, cache_key=None):
185
def __init__(self, branch, whole_history_data_cache):
278
186
assert branch.is_locked(), (
279
187
"Can only construct a History object with a read-locked branch.")
280
if file_cache is not None:
281
self._file_change_cache = file_cache
282
file_cache.history = self
284
self._file_change_cache = None
188
self._file_change_cache = None
285
189
self._branch = branch
286
self._inventory_cache = {}
287
self._branch_nick = self._branch.get_config().get_nickname()
288
self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))
190
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
290
192
self.last_revid = branch.last_revision()
292
caches = [RevInfoMemoryCache(whole_history_data_cache)]
293
if revinfo_disk_cache:
294
caches.append(revinfo_disk_cache)
295
self._load_whole_history_data(caches, cache_key)
194
whole_history_data = whole_history_data_cache.get(self.last_revid)
195
if whole_history_data is None:
196
whole_history_data = compute_whole_history_data(branch)
197
whole_history_data_cache[self.last_revid] = whole_history_data
199
(self._revision_graph, self._full_history, self._revision_info,
200
self._revno_revid, self._merge_sort, self._where_merged
201
) = whole_history_data
203
def use_file_cache(self, cache):
204
self._file_change_cache = cache
298
207
def has_revisions(self):
315
223
revid in revid_list.
317
225
if revid_list is None:
318
revid_list = [r[0][1] for r in self._rev_info]
226
revid_list = self._full_history
319
227
revid_set = set(revid_list)
320
228
revid = start_revid
322
229
def introduced_revisions(revid):
324
seq = self._rev_indices[revid]
325
md = self._rev_info[seq][0][2]
231
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
327
while i < len(self._rev_info) and self._rev_info[i][0][2] > md:
328
r.add(self._rev_info[i][0][1])
233
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
234
r.add(self._merge_sort[i][1])
332
238
if bzrlib.revision.is_null(revid):
334
240
if introduced_revisions(revid) & revid_set:
336
parents = self._rev_info[self._rev_indices[revid]][2]
242
parents = self._revision_graph[revid]
337
243
if len(parents) == 0:
339
245
revid = parents[0]
341
247
def get_short_revision_history_by_fileid(self, file_id):
248
# wow. is this really the only way we can get this list? by
249
# man-handling the weave store directly? :-0
342
250
# FIXME: would be awesome if we could get, for a folder, the list of
343
# revisions where items within that folder changed.i
344
possible_keys = [(file_id, revid) for revid in self._rev_indices]
345
get_parent_map = self._branch.repository.texts.get_parent_map
346
# We chunk the requests as this works better with GraphIndex.
347
# See _filter_revisions_touching_file_id in bzrlib/log.py
348
# for more information.
351
for start in xrange(0, len(possible_keys), chunk_size):
352
next_keys = possible_keys[start:start + chunk_size]
353
revids += [k[1] for k in get_parent_map(next_keys)]
354
del possible_keys, next_keys
251
# revisions where items within that folder changed.
252
possible_keys = [(file_id, revid) for revid in self._full_history]
253
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
254
return [revid for _, revid in existing_keys.iterkeys()]
357
256
def get_revision_history_since(self, revid_list, date):
358
257
# if a user asks for revisions starting at 01-sep, they mean inclusive,
359
258
# so start at midnight on 02-sep.
360
259
date = date + datetime.timedelta(days=1)
361
# our revid list is sorted in REVERSE date order,
362
# so go thru some hoops here...
260
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
363
261
revid_list.reverse()
364
index = bisect.bisect(_RevListToTimestamps(revid_list,
365
self._branch.repository),
262
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
369
265
revid_list.reverse()
385
280
# all the relevant changes (time-consuming) only to return a list of
386
281
# revids which will be used to fetch a set of changes again.
388
# if they entered a revid, just jump straight there;
389
# ignore the passed-in revid_list
283
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
390
284
revid = self.fix_revid(query)
391
285
if revid is not None:
392
286
if isinstance(revid, unicode):
393
287
revid = revid.encode('utf-8')
394
changes = self.get_changes([revid])
288
changes = self.get_changes([ revid ])
395
289
if (changes is not None) and (len(changes) > 0):
399
293
m = self.us_date_re.match(query)
400
294
if m is not None:
401
date = datetime.datetime(util.fix_year(int(m.group(3))),
295
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
405
297
m = self.earth_date_re.match(query)
406
298
if m is not None:
407
date = datetime.datetime(util.fix_year(int(m.group(3))),
299
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
411
301
m = self.iso_date_re.match(query)
412
302
if m is not None:
413
date = datetime.datetime(util.fix_year(int(m.group(1))),
303
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
416
304
if date is not None:
417
305
if revid_list is None:
418
# if no limit to the query was given,
419
# search only the direct-parent path.
306
# if no limit to the query was given, search only the direct-parent path.
420
307
revid_list = list(self.get_revids_from(None, self.last_revid))
421
308
return self.get_revision_history_since(revid_list, date)
571
452
revnol = revno.split(".")
572
453
revnos = ".".join(revnol[:-2])
573
454
revnolast = int(revnol[-1])
455
if d.has_key(revnos):
576
457
if revnolast < m:
577
d[revnos] = (revnolast, revid)
458
d[revnos] = ( revnolast, revid )
579
d[revnos] = (revnolast, revid)
581
return [revid for (_, revid) in d.itervalues()]
583
def add_branch_nicks(self, change):
460
d[revnos] = ( revnolast, revid )
462
return [ d[revnos][1] for revnos in d.keys() ]
464
def get_branch_nicks(self, changes):
585
given a 'change', fill in the branch nicks on all parents and merge
466
given a list of changes from L{get_changes}, fill in the branch nicks
467
on all parents and merge points.
588
469
fetch_set = set()
589
for p in change.parents:
590
fetch_set.add(p.revid)
591
for p in change.merge_points:
592
fetch_set.add(p.revid)
470
for change in changes:
471
for p in change.parents:
472
fetch_set.add(p.revid)
473
for p in change.merge_points:
474
fetch_set.add(p.revid)
593
475
p_changes = self.get_changes(list(fetch_set))
594
476
p_change_dict = dict([(c.revid, c) for c in p_changes])
595
for p in change.parents:
596
if p.revid in p_change_dict:
597
p.branch_nick = p_change_dict[p.revid].branch_nick
599
p.branch_nick = '(missing)'
600
for p in change.merge_points:
601
if p.revid in p_change_dict:
602
p.branch_nick = p_change_dict[p.revid].branch_nick
604
p.branch_nick = '(missing)'
477
for change in changes:
478
# arch-converted branches may not have merged branch info :(
479
for p in change.parents:
480
if p.revid in p_change_dict:
481
p.branch_nick = p_change_dict[p.revid].branch_nick
483
p.branch_nick = '(missing)'
484
for p in change.merge_points:
485
if p.revid in p_change_dict:
486
p.branch_nick = p_change_dict[p.revid].branch_nick
488
p.branch_nick = '(missing)'
606
490
def get_changes(self, revid_list):
607
491
"""Return a list of changes objects for the given revids.
647
527
return [self._change_from_revision(rev) for rev in rev_list]
529
def _get_deltas_for_revisions_with_trees(self, revisions):
530
"""Produce a list of revision deltas.
532
Note that the input is a sequence of REVISIONS, not revision_ids.
533
Trees will be held in memory until the generator exits.
534
Each delta is relative to the revision's lefthand predecessor.
535
(This is copied from bzrlib.)
537
required_trees = set()
538
for revision in revisions:
539
required_trees.add(revision.revid)
540
required_trees.update([p.revid for p in revision.parents[:1]])
541
trees = dict((t.get_revision_id(), t) for
542
t in self._branch.repository.revision_trees(required_trees))
544
self._branch.repository.lock_read()
546
for revision in revisions:
547
if not revision.parents:
548
old_tree = self._branch.repository.revision_tree(
549
bzrlib.revision.NULL_REVISION)
551
old_tree = trees[revision.parents[0].revid]
552
tree = trees[revision.revid]
553
ret.append(tree.changes_from(old_tree))
556
self._branch.repository.unlock()
649
558
def _change_from_revision(self, revision):
651
560
Given a bzrlib Revision, return a processed "change" for use in
563
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
565
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
654
567
message, short_message = clean_message(revision.message)
656
tags = self._branch.tags.get_reverse_tag_dict()
659
if tags.has_key(revision.revision_id):
660
revtags = ', '.join(tags[revision.revision_id])
663
570
'revid': revision.revision_id,
664
'date': datetime.datetime.fromtimestamp(revision.timestamp),
665
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
666
'authors': revision.get_apparent_authors(),
572
'author': revision.get_apparent_author(),
667
573
'branch_nick': revision.properties.get('branch-nick', None),
668
574
'short_comment': short_message,
669
575
'comment': revision.message,
670
576
'comment_clean': [util.html_clean(s) for s in message],
671
577
'parents': revision.parent_ids,
672
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
675
if isinstance(revision, bzrlib.foreign.ForeignRevision):
676
foreign_revid, mapping = (rev.foreign_revid, rev.mapping)
677
elif ":" in revision.revision_id:
679
foreign_revid, mapping = \
680
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
681
revision.revision_id)
682
except bzrlib.errors.InvalidRevisionId:
687
if foreign_revid is not None:
688
entry["foreign_vcs"] = mapping.vcs.abbreviation
689
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
690
579
return util.Container(entry)
692
def get_file_changes_uncached(self, entry):
694
old_revid = entry.parents[0].revid
696
old_revid = bzrlib.revision.NULL_REVISION
697
return self.file_changes_for_revision_ids(old_revid, entry.revid)
699
def get_file_changes(self, entry):
581
def get_file_changes_uncached(self, entries):
582
delta_list = self._get_deltas_for_revisions_with_trees(entries)
584
return [self.parse_delta(delta) for delta in delta_list]
586
def get_file_changes(self, entries):
700
587
if self._file_change_cache is None:
701
return self.get_file_changes_uncached(entry)
588
return self.get_file_changes_uncached(entries)
703
return self._file_change_cache.get_file_changes(entry)
705
def add_changes(self, entry):
706
changes = self.get_file_changes(entry)
707
entry.changes = changes
590
return self._file_change_cache.get_file_changes(entries)
592
def add_changes(self, entries):
593
changes_list = self.get_file_changes(entries)
595
for entry, changes in zip(entries, changes_list):
596
entry.changes = changes
598
def get_change_with_diff(self, revid, compare_revid=None):
599
change = self.get_changes([revid])[0]
601
if compare_revid is None:
603
compare_revid = change.parents[0].revid
605
compare_revid = 'null:'
607
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
608
rev_tree2 = self._branch.repository.revision_tree(revid)
609
delta = rev_tree2.changes_from(rev_tree1)
611
change.changes = self.parse_delta(delta)
612
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
709
616
def get_file(self, file_id, revid):
710
"""Returns (path, filename, file contents)"""
617
"returns (path, filename, data)"
711
618
inv = self.get_inventory(revid)
712
619
inv_entry = inv[file_id]
713
620
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
716
623
path = '/' + path
717
624
return path, inv_entry.name, rev_tree.get_file_text(file_id)
719
def file_changes_for_revision_ids(self, old_revid, new_revid):
626
def _parse_diffs(self, old_tree, new_tree, delta):
628
Return a list of processed diffs, in the format::
637
type: str('context', 'delete', or 'insert'),
646
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
648
process.append((old_path, new_path, fid, kind))
649
for path, fid, kind, text_modified, meta_modified in delta.modified:
650
process.append((path, path, fid, kind))
652
for old_path, new_path, fid, kind in process:
653
old_lines = old_tree.get_file_lines(fid)
654
new_lines = new_tree.get_file_lines(fid)
656
if old_lines != new_lines:
658
bzrlib.diff.internal_diff(old_path, old_lines,
659
new_path, new_lines, buffer)
660
except bzrlib.errors.BinaryFile:
663
diff = buffer.getvalue()
666
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
670
def _process_diff(self, diff):
671
# doesn't really need to be a method; could be static.
674
for line in diff.splitlines():
677
if line.startswith('+++ ') or line.startswith('--- '):
679
if line.startswith('@@ '):
681
if chunk is not None:
683
chunk = util.Container()
685
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
686
old_lineno = lines[0]
687
new_lineno = lines[1]
688
elif line.startswith(' '):
689
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=new_lineno,
690
type='context', line=util.fixed_width(line[1:])))
693
elif line.startswith('+'):
694
chunk.diff.append(util.Container(old_lineno=None, new_lineno=new_lineno,
695
type='insert', line=util.fixed_width(line[1:])))
697
elif line.startswith('-'):
698
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=None,
699
type='delete', line=util.fixed_width(line[1:])))
702
chunk.diff.append(util.Container(old_lineno=None, new_lineno=None,
703
type='unknown', line=util.fixed_width(repr(line))))
704
if chunk is not None:
708
def parse_delta(self, delta):
721
710
Return a nested data structure containing the changes in a delta::
730
text_changes: list((filename, file_id)),
732
repo = self._branch.repository
733
if (bzrlib.revision.is_null(old_revid) or
734
bzrlib.revision.is_null(new_revid)):
735
old_tree, new_tree = map(
736
repo.revision_tree, [old_revid, new_revid])
738
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
740
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
742
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
744
return util.Container(
745
added=sorted(reporter.added, key=lambda x: x.filename),
746
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
747
removed=sorted(reporter.removed, key=lambda x: x.filename),
748
modified=sorted(reporter.modified, key=lambda x: x.filename),
749
text_changes=sorted(reporter.text_changes, key=lambda x: x.filename))
725
for path, fid, kind in delta.added:
726
added.append((rich_filename(path, kind), fid))
728
for path, fid, kind, text_modified, meta_modified in delta.modified:
729
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
731
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
732
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
733
if meta_modified or text_modified:
734
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
736
for path, fid, kind in delta.removed:
737
removed.append((rich_filename(path, kind), fid))
739
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
742
def add_side_by_side(changes):
743
# FIXME: this is a rotten API.
744
for change in changes:
745
for m in change.changes.modified:
746
m.sbs_chunks = _make_side_by_side(m.chunks)
748
def get_filelist(self, inv, file_id, sort_type=None):
750
return the list of all files (and their attributes) within a given
754
dir_ie = inv[file_id]
755
path = inv.id2path(file_id)
760
for filename, entry in dir_ie.children.iteritems():
761
revid_set.add(entry.revision)
764
for change in self.get_changes(list(revid_set)):
765
change_dict[change.revid] = change
767
for filename, entry in dir_ie.children.iteritems():
769
if entry.kind == 'directory':
772
revid = entry.revision
774
file = util.Container(
775
filename=filename, executable=entry.executable, kind=entry.kind,
776
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
777
revid=revid, change=change_dict[revid])
778
file_list.append(file)
780
if sort_type == 'filename' or sort_type is None:
781
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
782
elif sort_type == 'size':
783
file_list.sort(key=lambda x: x.size)
784
elif sort_type == 'date':
785
file_list.sort(key=lambda x: x.change.date)
787
# Always sort by kind to get directories first
788
file_list.sort(key=lambda x: x.kind != 'directory')
791
for file in file_list:
798
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
800
def annotate_file(self, file_id, revid):
805
file_revid = self.get_inventory(revid)[file_id].revision
807
tree = self._branch.repository.revision_tree(file_revid)
810
for line_revid, text in tree.annotate_iter(file_id):
811
revid_set.add(line_revid)
812
if self._BADCHARS_RE.match(text):
813
# bail out; this isn't displayable text
814
yield util.Container(parity=0, lineno=1, status='same',
815
text='(This is a binary file.)',
816
change=util.Container())
818
change_cache = dict([(c.revid, c) \
819
for c in self.get_changes(list(revid_set))])
821
last_line_revid = None
822
for line_revid, text in tree.annotate_iter(file_id):
823
if line_revid == last_line_revid:
824
# remember which lines have a new revno and which don't
829
last_line_revid = line_revid
830
change = change_cache[line_revid]
831
trunc_revno = change.revno
832
if len(trunc_revno) > 10:
833
trunc_revno = trunc_revno[:9] + '...'
835
yield util.Container(parity=parity, lineno=lineno, status=status,
836
change=change, text=util.fixed_width(text))
839
self.log.debug('annotate: %r secs' % (time.time() - z,))