38
from StringIO import StringIO
42
import bzrlib.revision
40
44
from loggerhead import search
41
45
from loggerhead import util
42
46
from loggerhead.wholehistory import compute_whole_history_data
48
import bzrlib.progress
49
import bzrlib.revision
53
# bzrlib's UIFactory is not thread-safe
54
uihack = threading.local()
56
class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A silent UI factory whose progress-bar stack lives in thread-local
    storage.

    bzrlib's stock UIFactory is not thread-safe, so each serving thread
    gets its own ProgressBarStack of DummyProgress bars via the
    module-level `uihack` threading.local().
    """

    def nested_progress_bar(self):
        stack = getattr(uihack, '_progress_bar_stack', None)
        if stack is None:
            stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
            uihack._progress_bar_stack = stack
        return stack.get_nested()
62
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Flush paired delete/insert buffers into `line_list`.

    The shorter of the two buffers is padded with empty 'context'
    placeholder tuples so deletions and insertions line up row by row;
    each (delete, insert) pair is then emitted as one side-by-side row.
    Both buffers are emptied before returning, matching the original
    pop-until-empty contract.
    """
    # Pad the shorter buffer so both have the same number of rows.
    shortfall = len(insert_list) - len(delete_list)
    if shortfall > 0:
        delete_list.extend([(None, '', 'context')] * shortfall)
    elif shortfall < 0:
        insert_list.extend([(None, '', 'context')] * -shortfall)
    # Emit one combined row per pair.  Iterating with zip() instead of
    # repeated list.pop(0) avoids the accidental O(n^2) of shifting the
    # whole list on every pop.
    for old, new in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=old[0], new_lineno=new[0],
                                        old_line=old[1], new_line=new[1],
                                        old_type=old[2], new_type=new[2]))
    # Preserve the side effect callers may rely on: buffers are consumed.
    del delete_list[:]
    del insert_list[:]
78
def _make_side_by_side(chunk_list):
80
turn a normal unified-style diff (post-processed by parse_delta) into a
81
side-by-side diff structure. the new structure is::
89
type: str('context' or 'changed'),
94
for chunk in chunk_list:
97
delete_list, insert_list = [], []
98
for line in chunk.diff:
99
# Add <wbr/> every X characters so we can wrap properly
100
wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
101
wrap_lines = [util.html_clean(_line) for _line in wrap_line]
102
wrapped_line = wrap_char.join(wrap_lines)
104
if line.type == 'context':
105
if len(delete_list) or len(insert_list):
106
_process_side_by_side_buffers(line_list, delete_list,
108
delete_list, insert_list = [], []
109
line_list.append(util.Container(old_lineno=line.old_lineno,
110
new_lineno=line.new_lineno,
111
old_line=wrapped_line,
112
new_line=wrapped_line,
115
elif line.type == 'delete':
116
delete_list.append((line.old_lineno, wrapped_line, line.type))
117
elif line.type == 'insert':
118
insert_list.append((line.new_lineno, wrapped_line, line.type))
119
if len(delete_list) or len(insert_list):
120
_process_side_by_side_buffers(line_list, delete_list, insert_list)
121
out_chunk_list.append(util.Container(diff=line_list))
122
return out_chunk_list
125
49
def is_branch(folder):
178
100
def __getitem__(self, index):
    """Get the date of the index'd item.

    Looks up the revision id at `index` in self.revid_list and returns
    the commit timestamp of that revision as a naive local-time datetime
    (this object is used as a bisectable sequence of dates).
    """
    # The mangled merge left two copies of this return (the one-line and
    # the wrapped form); keep exactly one.
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
182
105
def __len__(self):
    """Return the number of revisions wrapped by this sequence."""
    return len(self.revid_list)
186
class History (object):
108
class FileChangeReporter(object):
110
def __init__(self, old_inv, new_inv):
115
self.text_changes = []
116
self.old_inv = old_inv
117
self.new_inv = new_inv
119
def revid(self, inv, file_id):
121
return inv[file_id].revision
122
except bzrlib.errors.NoSuchId:
125
def report(self, file_id, paths, versioned, renamed, modified,
127
if modified not in ('unchanged', 'kind changed'):
128
if versioned == 'removed':
129
filename = rich_filename(paths[0], kind[0])
131
filename = rich_filename(paths[1], kind[1])
132
self.text_changes.append(util.Container(
133
filename=filename, file_id=file_id,
134
old_revision=self.revid(self.old_inv, file_id),
135
new_revision=self.revid(self.new_inv, file_id)))
136
if versioned == 'added':
137
self.added.append(util.Container(
138
filename=rich_filename(paths[1], kind),
139
file_id=file_id, kind=kind[1]))
140
elif versioned == 'removed':
141
self.removed.append(util.Container(
142
filename=rich_filename(paths[0], kind),
143
file_id=file_id, kind=kind[0]))
145
self.renamed.append(util.Container(
146
old_filename=rich_filename(paths[0], kind[0]),
147
new_filename=rich_filename(paths[1], kind[1]),
149
text_modified=modified == 'modified'))
151
self.modified.append(util.Container(
152
filename=rich_filename(paths[1], kind),
155
# The lru_cache is not thread-safe, so we need a lock around it for
157
rev_info_memory_cache_lock = threading.RLock()
159
class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # Backing mapping; entries are stored as (revid, data) tuples.
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        # The backing lru_cache is not thread-safe, so serialize access;
        # try/finally guarantees the lock is released even if the lookup
        # raises.
        rev_info_memory_cache_lock.acquire()
        try:
            cached = self._cache.get(key)
        finally:
            rev_info_memory_cache_lock.release()
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            # Stale entry: it was stored for a different branch tip.
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get().
        """
        rev_info_memory_cache_lock.acquire()
        try:
            self._cache[key] = (revid, data)
        finally:
            rev_info_memory_cache_lock.release()
203
# Used to store locks that prevent multiple threads from building a
204
# revision graph for the same branch at the same time, because that can
205
# cause severe performance issues that are so bad that the system seems
207
revision_graph_locks = {}
208
revision_graph_check_lock = threading.Lock()
210
class History(object):
187
211
"""Decorate a branch to provide information for rendering.
189
213
History objects are expected to be short lived -- when serving a request
191
215
around it, serve the request, throw the History object away, unlock the
192
216
branch and throw it away.
194
:ivar _file_change_cache: xx
218
:ivar _file_change_cache: An object that caches information about the
219
files that changed between two revisions.
220
:ivar _rev_info: A list of information about revisions. This is by far
221
the most cryptic data structure in loggerhead. At the top level, it
222
is a list of 3-tuples [(merge-info, where-merged, parents)].
223
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
224
like a merged sorted list, but the revno is stringified.
225
`where-merged` is a tuple of revisions that have this revision as a
226
non-lefthand parent. Finally, `parents` is just the usual list of
227
parents of this revision.
228
:ivar _rev_indices: A dictionary mapping each revision id to the index of
229
the information about it in _rev_info.
230
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
197
def __init__(self, branch, whole_history_data_cache):
234
def _load_whole_history_data(self, caches, cache_key):
235
"""Set the attributes relating to the whole history of the branch.
237
:param caches: a list of caches with interfaces like
238
`RevInfoMemoryCache` and be ordered from fastest to slowest.
239
:param cache_key: the key to use with the caches.
241
self._rev_indices = None
242
self._rev_info = None
245
def update_missed_caches():
246
for cache in missed_caches:
247
cache.set(cache_key, self.last_revid, self._rev_info)
249
# Theoretically, it's possible for two threads to race in creating
250
# the Lock() object for their branch, so we put a lock around
251
# creating the per-branch Lock().
252
revision_graph_check_lock.acquire()
254
if cache_key not in revision_graph_locks:
255
revision_graph_locks[cache_key] = threading.Lock()
257
revision_graph_check_lock.release()
259
revision_graph_locks[cache_key].acquire()
262
data = cache.get(cache_key, self.last_revid)
264
self._rev_info = data
265
update_missed_caches()
268
missed_caches.append(cache)
270
whole_history_data = compute_whole_history_data(self._branch)
271
self._rev_info, self._rev_indices = whole_history_data
272
update_missed_caches()
274
revision_graph_locks[cache_key].release()
276
if self._rev_indices is not None:
277
self._revno_revid = {}
278
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
279
self._revno_revid[revno_str] = revid
281
self._revno_revid = {}
282
self._rev_indices = {}
283
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
284
self._rev_indices[revid] = seq
285
self._revno_revid[revno_str] = revid
287
def __init__(self, branch, whole_history_data_cache, file_cache=None,
288
revinfo_disk_cache=None, cache_key=None):
198
289
assert branch.is_locked(), (
199
290
"Can only construct a History object with a read-locked branch.")
200
self._file_change_cache = None
291
if file_cache is not None:
292
self._file_change_cache = file_cache
293
file_cache.history = self
295
self._file_change_cache = None
201
296
self._branch = branch
202
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
297
self._branch_tags = None
298
self._inventory_cache = {}
299
self._branch_nick = self._branch.get_config().get_nickname()
300
self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))
204
302
self.last_revid = branch.last_revision()
206
whole_history_data = whole_history_data_cache.get(self.last_revid)
207
if whole_history_data is None:
208
whole_history_data = compute_whole_history_data(branch)
209
whole_history_data_cache[self.last_revid] = whole_history_data
211
(self._revision_graph, self._full_history, self._revision_info,
212
self._revno_revid, self._merge_sort, self._where_merged
213
) = whole_history_data
215
def use_file_cache(self, cache):
216
self._file_change_cache = cache
304
caches = [RevInfoMemoryCache(whole_history_data_cache)]
305
if revinfo_disk_cache:
306
caches.append(revinfo_disk_cache)
307
self._load_whole_history_data(caches, cache_key)
219
310
def has_revisions(self):
235
327
revid in revid_list.
237
329
if revid_list is None:
238
revid_list = self._full_history
330
revid_list = [r[0][1] for r in self._rev_info]
239
331
revid_set = set(revid_list)
240
332
revid = start_revid
241
334
def introduced_revisions(revid):
243
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
336
seq = self._rev_indices[revid]
337
md = self._rev_info[seq][0][2]
245
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
246
r.add(self._merge_sort[i][1])
339
while i < len(self._rev_info) and self._rev_info[i][0][2] > md:
340
r.add(self._rev_info[i][0][1])
250
344
if bzrlib.revision.is_null(revid):
252
346
if introduced_revisions(revid) & revid_set:
254
parents = self._revision_graph[revid]
348
parents = self._rev_info[self._rev_indices[revid]][2]
255
349
if len(parents) == 0:
257
351
revid = parents[0]
259
353
def get_short_revision_history_by_fileid(self, file_id):
260
# wow. is this really the only way we can get this list? by
261
# man-handling the weave store directly? :-0
262
354
# FIXME: would be awesome if we could get, for a folder, the list of
263
# revisions where items within that folder changed.
264
possible_keys = [(file_id, revid) for revid in self._full_history]
265
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
266
return [revid for _, revid in existing_keys.iterkeys()]
355
# revisions where items within that folder changed.i
356
possible_keys = [(file_id, revid) for revid in self._rev_indices]
357
get_parent_map = self._branch.repository.texts.get_parent_map
358
# We chunk the requests as this works better with GraphIndex.
359
# See _filter_revisions_touching_file_id in bzrlib/log.py
360
# for more information.
363
for start in xrange(0, len(possible_keys), chunk_size):
364
next_keys = possible_keys[start:start + chunk_size]
365
revids += [k[1] for k in get_parent_map(next_keys)]
366
del possible_keys, next_keys
268
369
def get_revision_history_since(self, revid_list, date):
269
370
# if a user asks for revisions starting at 01-sep, they mean inclusive,
270
371
# so start at midnight on 02-sep.
271
372
date = date + datetime.timedelta(days=1)
272
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
373
# our revid list is sorted in REVERSE date order,
374
# so go thru some hoops here...
273
375
revid_list.reverse()
274
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
376
index = bisect.bisect(_RevListToTimestamps(revid_list,
377
self._branch.repository),
277
381
revid_list.reverse()
292
397
# all the relevant changes (time-consuming) only to return a list of
293
398
# revids which will be used to fetch a set of changes again.
295
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
400
# if they entered a revid, just jump straight there;
401
# ignore the passed-in revid_list
296
402
revid = self.fix_revid(query)
297
403
if revid is not None:
298
404
if isinstance(revid, unicode):
299
405
revid = revid.encode('utf-8')
300
changes = self.get_changes([ revid ])
406
changes = self.get_changes([revid])
301
407
if (changes is not None) and (len(changes) > 0):
305
411
m = self.us_date_re.match(query)
306
412
if m is not None:
307
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
413
date = datetime.datetime(util.fix_year(int(m.group(3))),
309
417
m = self.earth_date_re.match(query)
310
418
if m is not None:
311
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
419
date = datetime.datetime(util.fix_year(int(m.group(3))),
313
423
m = self.iso_date_re.match(query)
314
424
if m is not None:
315
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
425
date = datetime.datetime(util.fix_year(int(m.group(1))),
316
428
if date is not None:
317
429
if revid_list is None:
318
# if no limit to the query was given, search only the direct-parent path.
430
# if no limit to the query was given,
431
# search only the direct-parent path.
319
432
revid_list = list(self.get_revids_from(None, self.last_revid))
320
433
return self.get_revision_history_since(revid_list, date)
464
583
revnol = revno.split(".")
465
584
revnos = ".".join(revnol[:-2])
466
585
revnolast = int(revnol[-1])
467
if d.has_key(revnos):
469
588
if revnolast < m:
470
d[revnos] = ( revnolast, revid )
589
d[revnos] = (revnolast, revid)
472
d[revnos] = ( revnolast, revid )
474
return [ d[revnos][1] for revnos in d.keys() ]
476
def get_branch_nicks(self, changes):
591
d[revnos] = (revnolast, revid)
593
return [revid for (_, revid) in d.itervalues()]
595
def add_branch_nicks(self, change):
478
given a list of changes from L{get_changes}, fill in the branch nicks
479
on all parents and merge points.
597
given a 'change', fill in the branch nicks on all parents and merge
481
600
fetch_set = set()
482
for change in changes:
483
for p in change.parents:
484
fetch_set.add(p.revid)
485
for p in change.merge_points:
486
fetch_set.add(p.revid)
601
for p in change.parents:
602
fetch_set.add(p.revid)
603
for p in change.merge_points:
604
fetch_set.add(p.revid)
487
605
p_changes = self.get_changes(list(fetch_set))
488
606
p_change_dict = dict([(c.revid, c) for c in p_changes])
489
for change in changes:
490
# arch-converted branches may not have merged branch info :(
491
for p in change.parents:
492
if p.revid in p_change_dict:
493
p.branch_nick = p_change_dict[p.revid].branch_nick
495
p.branch_nick = '(missing)'
496
for p in change.merge_points:
497
if p.revid in p_change_dict:
498
p.branch_nick = p_change_dict[p.revid].branch_nick
500
p.branch_nick = '(missing)'
607
for p in change.parents:
608
if p.revid in p_change_dict:
609
p.branch_nick = p_change_dict[p.revid].branch_nick
611
p.branch_nick = '(missing)'
612
for p in change.merge_points:
613
if p.revid in p_change_dict:
614
p.branch_nick = p_change_dict[p.revid].branch_nick
616
p.branch_nick = '(missing)'
502
618
def get_changes(self, revid_list):
503
619
"""Return a list of changes objects for the given revids.
539
659
return [self._change_from_revision(rev) for rev in rev_list]
541
def _get_deltas_for_revisions_with_trees(self, revisions):
542
"""Produce a list of revision deltas.
544
Note that the input is a sequence of REVISIONS, not revision_ids.
545
Trees will be held in memory until the generator exits.
546
Each delta is relative to the revision's lefthand predecessor.
547
(This is copied from bzrlib.)
549
required_trees = set()
550
for revision in revisions:
551
required_trees.add(revision.revid)
552
required_trees.update([p.revid for p in revision.parents[:1]])
553
trees = dict((t.get_revision_id(), t) for
554
t in self._branch.repository.revision_trees(required_trees))
556
self._branch.repository.lock_read()
558
for revision in revisions:
559
if not revision.parents:
560
old_tree = self._branch.repository.revision_tree(
561
bzrlib.revision.NULL_REVISION)
563
old_tree = trees[revision.parents[0].revid]
564
tree = trees[revision.revid]
565
ret.append(tree.changes_from(old_tree))
568
self._branch.repository.unlock()
570
661
def _change_from_revision(self, revision):
572
663
Given a bzrlib Revision, return a processed "change" for use in
575
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
577
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
579
666
message, short_message = clean_message(revision.message)
668
if self._branch_tags is None:
669
self._branch_tags = self._branch.tags.get_reverse_tag_dict()
672
if revision.revision_id in self._branch_tags:
673
revtags = ', '.join(self._branch_tags[revision.revision_id])
582
676
'revid': revision.revision_id,
584
'author': revision.get_apparent_author(),
677
'date': datetime.datetime.fromtimestamp(revision.timestamp),
678
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
679
'authors': revision.get_apparent_authors(),
585
680
'branch_nick': revision.properties.get('branch-nick', None),
586
681
'short_comment': short_message,
587
682
'comment': revision.message,
588
683
'comment_clean': [util.html_clean(s) for s in message],
589
684
'parents': revision.parent_ids,
685
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
688
if isinstance(revision, bzrlib.foreign.ForeignRevision):
689
foreign_revid, mapping = (rev.foreign_revid, rev.mapping)
690
elif ":" in revision.revision_id:
692
foreign_revid, mapping = \
693
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
694
revision.revision_id)
695
except bzrlib.errors.InvalidRevisionId:
700
if foreign_revid is not None:
701
entry["foreign_vcs"] = mapping.vcs.abbreviation
702
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
591
703
return util.Container(entry)
593
def get_file_changes_uncached(self, entries):
594
delta_list = self._get_deltas_for_revisions_with_trees(entries)
596
return [self.parse_delta(delta) for delta in delta_list]
598
def get_file_changes(self, entries):
705
def get_file_changes_uncached(self, entry):
707
old_revid = entry.parents[0].revid
709
old_revid = bzrlib.revision.NULL_REVISION
710
return self.file_changes_for_revision_ids(old_revid, entry.revid)
712
def get_file_changes(self, entry):
599
713
if self._file_change_cache is None:
600
return self.get_file_changes_uncached(entries)
714
return self.get_file_changes_uncached(entry)
602
return self._file_change_cache.get_file_changes(entries)
604
def add_changes(self, entries):
605
changes_list = self.get_file_changes(entries)
607
for entry, changes in zip(entries, changes_list):
608
entry.changes = changes
610
def get_change_with_diff(self, revid, compare_revid=None):
611
change = self.get_changes([revid])[0]
613
if compare_revid is None:
615
compare_revid = change.parents[0].revid
617
compare_revid = 'null:'
619
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
620
rev_tree2 = self._branch.repository.revision_tree(revid)
621
delta = rev_tree2.changes_from(rev_tree1)
623
change.changes = self.parse_delta(delta)
624
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
716
return self._file_change_cache.get_file_changes(entry)
718
def add_changes(self, entry):
719
changes = self.get_file_changes(entry)
720
entry.changes = changes
628
722
def get_file(self, file_id, revid):
629
"returns (path, filename, data)"
723
"""Returns (path, filename, file contents)"""
630
724
inv = self.get_inventory(revid)
631
725
inv_entry = inv[file_id]
632
726
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
635
729
path = '/' + path
636
730
return path, inv_entry.name, rev_tree.get_file_text(file_id)
638
def _parse_diffs(self, old_tree, new_tree, delta):
640
Return a list of processed diffs, in the format::
649
type: str('context', 'delete', or 'insert'),
658
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
660
process.append((old_path, new_path, fid, kind))
661
for path, fid, kind, text_modified, meta_modified in delta.modified:
662
process.append((path, path, fid, kind))
664
for old_path, new_path, fid, kind in process:
665
old_lines = old_tree.get_file_lines(fid)
666
new_lines = new_tree.get_file_lines(fid)
668
if old_lines != new_lines:
670
bzrlib.diff.internal_diff(old_path, old_lines,
671
new_path, new_lines, buffer)
672
except bzrlib.errors.BinaryFile:
675
diff = buffer.getvalue()
678
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
682
def _process_diff(self, diff):
683
# doesn't really need to be a method; could be static.
686
for line in diff.splitlines():
689
if line.startswith('+++ ') or line.startswith('--- '):
691
if line.startswith('@@ '):
693
if chunk is not None:
695
chunk = util.Container()
697
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
698
old_lineno = lines[0]
699
new_lineno = lines[1]
700
elif line.startswith(' '):
701
chunk.diff.append(util.Container(old_lineno=old_lineno,
702
new_lineno=new_lineno,
707
elif line.startswith('+'):
708
chunk.diff.append(util.Container(old_lineno=None,
709
new_lineno=new_lineno,
710
type='insert', line=line[1:]))
712
elif line.startswith('-'):
713
chunk.diff.append(util.Container(old_lineno=old_lineno,
715
type='delete', line=line[1:]))
718
chunk.diff.append(util.Container(old_lineno=None,
722
if chunk is not None:
726
def parse_delta(self, delta):
732
def file_changes_for_revision_ids(self, old_revid, new_revid):
728
734
Return a nested data structure containing the changes in a delta::
743
for path, fid, kind in delta.added:
744
added.append((rich_filename(path, kind), fid))
746
for path, fid, kind, text_modified, meta_modified in delta.modified:
747
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
749
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
750
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
751
if meta_modified or text_modified:
752
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
754
for path, fid, kind in delta.removed:
755
removed.append((rich_filename(path, kind), fid))
757
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
760
def add_side_by_side(changes):
    """Attach side-by-side diff chunks to every modified file entry.

    For each change, each entry in `change.changes.modified` gains an
    `sbs_chunks` attribute built from its unified-diff `chunks`.
    """
    # FIXME: this is a rotten API.
    for entry in changes:
        for mod in entry.changes.modified:
            mod.sbs_chunks = _make_side_by_side(mod.chunks)
766
def get_filelist(self, inv, file_id, sort_type=None):
768
return the list of all files (and their attributes) within a given
772
dir_ie = inv[file_id]
773
path = inv.id2path(file_id)
778
for filename, entry in dir_ie.children.iteritems():
779
revid_set.add(entry.revision)
782
for change in self.get_changes(list(revid_set)):
783
change_dict[change.revid] = change
785
for filename, entry in dir_ie.children.iteritems():
787
if entry.kind == 'directory':
790
revid = entry.revision
792
file = util.Container(
793
filename=filename, executable=entry.executable, kind=entry.kind,
794
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
795
revid=revid, change=change_dict[revid])
796
file_list.append(file)
798
if sort_type == 'filename' or sort_type is None:
799
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
800
elif sort_type == 'size':
801
file_list.sort(key=lambda x: x.size)
802
elif sort_type == 'date':
803
file_list.sort(key=lambda x: x.change.date)
805
# Always sort by kind to get directories first
806
file_list.sort(key=lambda x: x.kind != 'directory')
809
for file in file_list:
816
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
818
def annotate_file(self, file_id, revid):
823
file_revid = self.get_inventory(revid)[file_id].revision
825
tree = self._branch.repository.revision_tree(file_revid)
828
for line_revid, text in tree.annotate_iter(file_id):
829
revid_set.add(line_revid)
830
if self._BADCHARS_RE.match(text):
831
# bail out; this isn't displayable text
832
yield util.Container(parity=0, lineno=1, status='same',
833
text='(This is a binary file.)',
834
change=util.Container())
836
change_cache = dict([(c.revid, c) \
837
for c in self.get_changes(list(revid_set))])
839
last_line_revid = None
840
for line_revid, text in tree.annotate_iter(file_id):
841
if line_revid == last_line_revid:
842
# remember which lines have a new revno and which don't
847
last_line_revid = line_revid
848
change = change_cache[line_revid]
849
trunc_revno = change.revno
850
if len(trunc_revno) > 10:
851
trunc_revno = trunc_revno[:9] + '...'
853
yield util.Container(parity=parity, lineno=lineno, status=status,
854
change=change, text=util.fixed_width(text))
857
self.log.debug('annotate: %r secs' % (time.time() - z,))
743
text_changes: list((filename, file_id)),
745
repo = self._branch.repository
746
if (bzrlib.revision.is_null(old_revid) or
747
bzrlib.revision.is_null(new_revid)):
748
old_tree, new_tree = map(
749
repo.revision_tree, [old_revid, new_revid])
751
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
753
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
755
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
757
return util.Container(
758
added=sorted(reporter.added, key=lambda x: x.filename),
759
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
760
removed=sorted(reporter.removed, key=lambda x: x.filename),
761
modified=sorted(reporter.modified, key=lambda x: x.filename),
762
text_changes=sorted(reporter.text_changes, key=lambda x: x.filename))