36
from StringIO import StringIO
38
from loggerhead import search
39
from loggerhead import util
40
from loggerhead.wholehistory import compute_whole_history_data
38
43
import bzrlib.branch
40
45
import bzrlib.errors
46
import bzrlib.progress
42
47
import bzrlib.revision
44
from loggerhead import search
45
from loggerhead import util
46
from loggerhead.wholehistory import compute_whole_history_data
51
# bzrlib's UIFactory is not thread-safe
uihack = threading.local()


class ThreadSafeUIFactory(bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory that keeps one progress-bar stack per thread.

    The stack is stored in the thread-local `uihack`, so concurrent request
    threads never share (or corrupt) a single progress-bar stack.
    """

    def nested_progress_bar(self):
        # Lazily create this thread's stack; DummyProgress suppresses output.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()


bzrlib.ui.ui_factory = ThreadSafeUIFactory()
63
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair buffered delete/insert lines into side-by-side rows.

    The shorter buffer is padded with empty 'context' entries, then each
    delete/insert pair is appended to `line_list` as one Container row.
    Both buffers are emptied before returning, matching the original
    pop-based draining behaviour.
    """
    filler = (None, '', 'context')
    while len(delete_list) < len(insert_list):
        delete_list.append(filler)
    while len(insert_list) < len(delete_list):
        insert_list.append(filler)
    # zip() pairs the (now equal-length) buffers without the O(n^2)
    # element shifting that repeated list.pop(0) incurs.
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Drain the buffers in place, as the pop(0) loop did.
    del delete_list[:]
    del insert_list[:]
76
def _make_side_by_side(chunk_list):
    """
    turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  the new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                # A context line terminates any pending delete/insert run:
                # flush the buffers before emitting the context row.
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(old_lineno=line.old_lineno,
                                                new_lineno=line.new_lineno,
                                                old_line=line.line,
                                                new_line=line.line,
                                                old_type=line.type,
                                                new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        # Flush any run left at the end of the chunk.
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
49
113
def is_branch(folder):
100
166
def __getitem__(self, index):
    """Get the date of the index'd item"""
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
105
170
def __len__(self):
    """Return the number of revisions in the list."""
    return len(self.revid_list)
108
class FileChangeReporter(object):
    """Accumulate per-file change records from bzrlib's delta reporting.

    bzrlib.delta.report_changes calls report() once per changed file;
    the results are collected in the `added`, `removed`, `renamed`,
    `modified` and `text_changes` lists as util.Container objects.
    """

    def __init__(self, old_inv, new_inv):
        # Accumulators filled in by report().
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        """Return the revision of `file_id` in `inv`, or 'null:' if absent."""
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        """Record one file change (callback signature fixed by bzrlib)."""
        if modified not in ('unchanged', 'kind changed'):
            # paths[0]/kind[0] describe the old side, paths[1]/kind[1] the new.
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id, kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind),
                file_id=file_id, kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                file_id=file_id,
                text_modified=modified == 'modified'))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id))
155
# The lru_cache is not thread-safe, so we need a lock around it for
# all threads.
rev_info_memory_cache_lock = threading.RLock()


class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # `cache` is any mapping-like object (typically an LRU cache).
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        rev_info_memory_cache_lock.acquire()
        try:
            # try/finally guarantees the lock is released even if the
            # underlying cache lookup raises.
            cached = self._cache.get(key)
        finally:
            rev_info_memory_cache_lock.release()
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get()."""
        rev_info_memory_cache_lock.acquire()
        try:
            self._cache[key] = (revid, data)
        finally:
            rev_info_memory_cache_lock.release()
203
# Used to store locks that prevent multiple threads from building a
# revision graph for the same branch at the same time, because that can
# cause severe performance issues that are so bad that the system seems
# to hang.
revision_graph_locks = {}
# Guards creation of entries in revision_graph_locks.
revision_graph_check_lock = threading.Lock()
210
class History(object):
    """Decorate a branch to provide information for rendering.

    History objects are expected to be short lived -- when serving a request
    for a particular branch, we instantiate a History object, lock the
    branch, serve the request, throw the History object away, unlock the
    branch and throw it away.

    :ivar _file_change_cache: An object that caches information about the
        files that changed between two revisions.
    :ivar _rev_info: A list of information about revisions.  This is by far
        the most cryptic data structure in loggerhead.  At the top level, it
        is a list of 3-tuples [(merge-info, where-merged, parents)].
        `merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
        like a merged sorted list, but the revno is stringified.
        `where-merged` is a tuple of revisions that have this revision as a
        non-lefthand parent.  Finally, `parents` is just the usual list of
        parents of this revision.
    :ivar _rev_indices: A dictionary mapping each revision id to the index of
        the information about it in _rev_info.
    :ivar _revno_revid: A dictionary mapping stringified revnos to revision
        ids.
    """
234
def _load_whole_history_data(self, caches, cache_key):
    """Set the attributes relating to the whole history of the branch.

    :param caches: a list of caches with interfaces like
        `RevInfoMemoryCache` and be ordered from fastest to slowest.
    :param cache_key: the key to use with the caches.
    """
    self._rev_indices = None
    self._rev_info = None

    missed_caches = []

    def update_missed_caches():
        # Back-fill every cache that missed with the freshly obtained data.
        for cache in missed_caches:
            cache.set(cache_key, self.last_revid, self._rev_info)

    # Theoretically, it's possible for two threads to race in creating
    # the Lock() object for their branch, so we put a lock around
    # creating the per-branch Lock().
    revision_graph_check_lock.acquire()
    try:
        if cache_key not in revision_graph_locks:
            revision_graph_locks[cache_key] = threading.Lock()
    finally:
        revision_graph_check_lock.release()

    revision_graph_locks[cache_key].acquire()
    try:
        # Try each cache from fastest to slowest; fall back to computing
        # the whole history from the branch if all of them miss.
        for cache in caches:
            data = cache.get(cache_key, self.last_revid)
            if data is not None:
                self._rev_info = data
                update_missed_caches()
                break
            else:
                missed_caches.append(cache)
        else:
            whole_history_data = compute_whole_history_data(self._branch)
            self._rev_info, self._rev_indices = whole_history_data
            update_missed_caches()
    finally:
        revision_graph_locks[cache_key].release()

    if self._rev_indices is not None:
        # Freshly computed: only the revno->revid map needs building.
        self._revno_revid = {}
        for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
            self._revno_revid[revno_str] = revid
    else:
        # Loaded from a cache: rebuild both index maps from _rev_info.
        self._revno_revid = {}
        self._rev_indices = {}
        for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
            self._rev_indices[revid] = seq
            self._revno_revid[revno_str] = revid
287
def __init__(self, branch, whole_history_data_cache, file_cache=None,
             revinfo_disk_cache=None, cache_key=None):
    """Create a History decorator for `branch`.

    :param branch: a read-locked bzrlib branch.
    :param whole_history_data_cache: in-memory cache for whole-history data.
    :param file_cache: optional cache of per-revision file changes.
    :param revinfo_disk_cache: optional slower, persistent rev-info cache.
    :param cache_key: key identifying this branch in the caches.
    """
    assert branch.is_locked(), (
        "Can only construct a History object with a read-locked branch.")
    if file_cache is not None:
        self._file_change_cache = file_cache
        file_cache.history = self
    else:
        self._file_change_cache = None
    self._branch = branch
    self._branch_tags = None
    self._inventory_cache = {}
    self._branch_nick = self._branch.get_config().get_nickname()
    self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))

    self.last_revid = branch.last_revision()

    # Consult the fast in-memory cache first, then the disk cache if given.
    caches = [RevInfoMemoryCache(whole_history_data_cache)]
    if revinfo_disk_cache:
        caches.append(revinfo_disk_cache)
    self._load_whole_history_data(caches, cache_key)
310
207
def has_revisions(self):
327
223
revid in revid_list.
329
225
if revid_list is None:
330
revid_list = [r[0][1] for r in self._rev_info]
226
revid_list = self._full_history
331
227
revid_set = set(revid_list)
332
228
revid = start_revid
334
229
def introduced_revisions(revid):
336
seq = self._rev_indices[revid]
337
md = self._rev_info[seq][0][2]
231
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
339
while i < len(self._rev_info) and self._rev_info[i][0][2] > md:
340
r.add(self._rev_info[i][0][1])
233
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
234
r.add(self._merge_sort[i][1])
344
238
if bzrlib.revision.is_null(revid):
346
240
if introduced_revisions(revid) & revid_set:
348
parents = self._rev_info[self._rev_indices[revid]][2]
242
parents = self._revision_graph[revid]
349
243
if len(parents) == 0:
351
245
revid = parents[0]
353
247
def get_short_revision_history_by_fileid(self, file_id):
248
# wow. is this really the only way we can get this list? by
249
# man-handling the weave store directly? :-0
354
250
# FIXME: would be awesome if we could get, for a folder, the list of
355
# revisions where items within that folder changed.i
356
possible_keys = [(file_id, revid) for revid in self._rev_indices]
357
get_parent_map = self._branch.repository.texts.get_parent_map
358
# We chunk the requests as this works better with GraphIndex.
359
# See _filter_revisions_touching_file_id in bzrlib/log.py
360
# for more information.
363
for start in xrange(0, len(possible_keys), chunk_size):
364
next_keys = possible_keys[start:start + chunk_size]
365
revids += [k[1] for k in get_parent_map(next_keys)]
366
del possible_keys, next_keys
251
# revisions where items within that folder changed.
252
possible_keys = [(file_id, revid) for revid in self._full_history]
253
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
254
return [revid for _, revid in existing_keys.iterkeys()]
369
256
def get_revision_history_since(self, revid_list, date):
    """Return the subset of `revid_list` committed on or after `date`."""
    # if a user asks for revisions starting at 01-sep, they mean inclusive,
    # so start at midnight on 02-sep.
    date = date + datetime.timedelta(days=1)
    # our revid list is sorted in REVERSE date order,
    # so go thru some hoops here...
    revid_list.reverse()
    index = bisect.bisect(_RevListToTimestamps(revid_list,
                                               self._branch.repository),
                          date)
    if index == 0:
        return []
    revid_list.reverse()
    index = -index
    return revid_list[index:]
397
280
# all the relevant changes (time-consuming) only to return a list of
398
281
# revids which will be used to fetch a set of changes again.
400
# if they entered a revid, just jump straight there;
401
# ignore the passed-in revid_list
283
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
402
284
revid = self.fix_revid(query)
403
285
if revid is not None:
404
286
if isinstance(revid, unicode):
405
287
revid = revid.encode('utf-8')
406
changes = self.get_changes([revid])
288
changes = self.get_changes([ revid ])
407
289
if (changes is not None) and (len(changes) > 0):
411
293
m = self.us_date_re.match(query)
412
294
if m is not None:
413
date = datetime.datetime(util.fix_year(int(m.group(3))),
295
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
417
297
m = self.earth_date_re.match(query)
418
298
if m is not None:
419
date = datetime.datetime(util.fix_year(int(m.group(3))),
299
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
423
301
m = self.iso_date_re.match(query)
424
302
if m is not None:
425
date = datetime.datetime(util.fix_year(int(m.group(1))),
303
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
428
304
if date is not None:
429
305
if revid_list is None:
430
# if no limit to the query was given,
431
# search only the direct-parent path.
306
# if no limit to the query was given, search only the direct-parent path.
432
307
revid_list = list(self.get_revids_from(None, self.last_revid))
433
308
return self.get_revision_history_since(revid_list, date)
583
452
revnol = revno.split(".")
584
453
revnos = ".".join(revnol[:-2])
585
454
revnolast = int(revnol[-1])
455
if d.has_key(revnos):
588
457
if revnolast < m:
589
d[revnos] = (revnolast, revid)
458
d[revnos] = ( revnolast, revid )
591
d[revnos] = (revnolast, revid)
593
return [revid for (_, revid) in d.itervalues()]
595
def add_branch_nicks(self, change):
    """
    given a 'change', fill in the branch nicks on all parents and merge
    points.
    """
    fetch_set = set()
    for p in change.parents:
        fetch_set.add(p.revid)
    for p in change.merge_points:
        fetch_set.add(p.revid)
    p_changes = self.get_changes(list(fetch_set))
    p_change_dict = dict([(c.revid, c) for c in p_changes])
    for p in change.parents:
        if p.revid in p_change_dict:
            p.branch_nick = p_change_dict[p.revid].branch_nick
        else:
            # e.g. ghost parents from converted branches.
            p.branch_nick = '(missing)'
    for p in change.merge_points:
        if p.revid in p_change_dict:
            p.branch_nick = p_change_dict[p.revid].branch_nick
        else:
            p.branch_nick = '(missing)'
618
490
def get_changes(self, revid_list):
619
491
"""Return a list of changes objects for the given revids.
659
527
return [self._change_from_revision(rev) for rev in rev_list]
529
def _get_deltas_for_revisions_with_trees(self, revisions):
    """Produce a list of revision deltas.

    Note that the input is a sequence of REVISIONS, not revision_ids.
    Trees will be held in memory until the generator exits.
    Each delta is relative to the revision's lefthand predecessor.
    (This is copied from bzrlib.)
    """
    required_trees = set()
    for revision in revisions:
        required_trees.add(revision.revid)
        # Only the first (lefthand) parent's tree is needed for the delta.
        required_trees.update([p.revid for p in revision.parents[:1]])
    trees = dict((t.get_revision_id(), t) for
                 t in self._branch.repository.revision_trees(required_trees))
    ret = []
    self._branch.repository.lock_read()
    try:
        for revision in revisions:
            if not revision.parents:
                # Root revision: diff against the empty tree.
                old_tree = self._branch.repository.revision_tree(
                    bzrlib.revision.NULL_REVISION)
            else:
                old_tree = trees[revision.parents[0].revid]
            tree = trees[revision.revid]
            ret.append(tree.changes_from(old_tree))
        return ret
    finally:
        self._branch.repository.unlock()
661
558
def _change_from_revision(self, revision):
663
560
Given a bzrlib Revision, return a processed "change" for use in
563
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
565
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
666
567
message, short_message = clean_message(revision.message)
668
if self._branch_tags is None:
669
self._branch_tags = self._branch.tags.get_reverse_tag_dict()
672
if revision.revision_id in self._branch_tags:
673
revtags = ', '.join(self._branch_tags[revision.revision_id])
676
570
'revid': revision.revision_id,
677
'date': datetime.datetime.fromtimestamp(revision.timestamp),
678
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
679
'authors': revision.get_apparent_authors(),
572
'author': revision.get_apparent_author(),
680
573
'branch_nick': revision.properties.get('branch-nick', None),
681
574
'short_comment': short_message,
682
575
'comment': revision.message,
683
576
'comment_clean': [util.html_clean(s) for s in message],
684
577
'parents': revision.parent_ids,
685
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
688
if isinstance(revision, bzrlib.foreign.ForeignRevision):
689
foreign_revid, mapping = (rev.foreign_revid, rev.mapping)
690
elif ":" in revision.revision_id:
692
foreign_revid, mapping = \
693
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
694
revision.revision_id)
695
except bzrlib.errors.InvalidRevisionId:
700
if foreign_revid is not None:
701
entry["foreign_vcs"] = mapping.vcs.abbreviation
702
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
703
579
return util.Container(entry)
705
def get_file_changes_uncached(self, entry):
    """Compute the file changes for `entry` without consulting the cache."""
    if entry.parents:
        old_revid = entry.parents[0].revid
    else:
        old_revid = bzrlib.revision.NULL_REVISION
    return self.file_changes_for_revision_ids(old_revid, entry.revid)

def get_file_changes(self, entry):
    """Return the file changes for `entry`, via the cache when available."""
    if self._file_change_cache is None:
        return self.get_file_changes_uncached(entry)
    else:
        return self._file_change_cache.get_file_changes(entry)

def add_changes(self, entry):
    """Attach the computed file changes to `entry` as `entry.changes`."""
    changes = self.get_file_changes(entry)
    entry.changes = changes
598
def get_change_with_diff(self, revid, compare_revid=None):
    """Return the change for `revid` with parsed per-file diffs attached.

    :param compare_revid: base revision to diff against; defaults to the
        first parent (or the null revision for a root revision).
    """
    change = self.get_changes([revid])[0]

    if compare_revid is None:
        if change.parents:
            compare_revid = change.parents[0].revid
        else:
            compare_revid = 'null:'

    rev_tree1 = self._branch.repository.revision_tree(compare_revid)
    rev_tree2 = self._branch.repository.revision_tree(revid)
    delta = rev_tree2.changes_from(rev_tree1)

    change.changes = self.parse_delta(delta)
    change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)

    return change
722
616
def get_file(self, file_id, revid):
    """Returns (path, filename, file contents)"""
    inv = self.get_inventory(revid)
    inv_entry = inv[file_id]
    rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
    path = inv.id2path(file_id)
    # Normalise to an absolute-style path for the templates.
    if not path.startswith('/'):
        path = '/' + path
    return path, inv_entry.name, rev_tree.get_file_text(file_id)
732
def file_changes_for_revision_ids(self, old_revid, new_revid):
626
def _parse_diffs(self, old_tree, new_tree, delta):
628
Return a list of processed diffs, in the format::
637
type: str('context', 'delete', or 'insert'),
646
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
648
process.append((old_path, new_path, fid, kind))
649
for path, fid, kind, text_modified, meta_modified in delta.modified:
650
process.append((path, path, fid, kind))
652
for old_path, new_path, fid, kind in process:
653
old_lines = old_tree.get_file_lines(fid)
654
new_lines = new_tree.get_file_lines(fid)
656
if old_lines != new_lines:
658
bzrlib.diff.internal_diff(old_path, old_lines,
659
new_path, new_lines, buffer)
660
except bzrlib.errors.BinaryFile:
663
diff = buffer.getvalue()
666
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
670
def _process_diff(self, diff):
    """Parse a unified diff string into a list of chunk Containers.

    Each chunk has a `diff` list of Containers with old_lineno, new_lineno,
    type ('context'/'insert'/'delete'/'unknown') and the rendered line.
    """
    # doesn't really need to be a method; could be static.
    chunks = []
    chunk = None
    for line in diff.splitlines():
        if len(line) == 0:
            continue
        # File header lines carry no row data.
        if line.startswith('+++ ') or line.startswith('--- '):
            continue
        if line.startswith('@@ '):
            # start a new chunk; flush the previous one first
            if chunk is not None:
                chunks.append(chunk)
            chunk = util.Container()
            chunk.diff = []
            # '@@ -old,count +new,count @@' -> starting line numbers
            lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
            old_lineno = lines[0]
            new_lineno = lines[1]
        elif line.startswith(' '):
            chunk.diff.append(util.Container(old_lineno=old_lineno,
                                             new_lineno=new_lineno,
                                             type='context',
                                             line=util.fixed_width(line[1:])))
            old_lineno += 1
            new_lineno += 1
        elif line.startswith('+'):
            chunk.diff.append(util.Container(old_lineno=None,
                                             new_lineno=new_lineno,
                                             type='insert',
                                             line=util.fixed_width(line[1:])))
            new_lineno += 1
        elif line.startswith('-'):
            chunk.diff.append(util.Container(old_lineno=old_lineno,
                                             new_lineno=None,
                                             type='delete',
                                             line=util.fixed_width(line[1:])))
            old_lineno += 1
        else:
            chunk.diff.append(util.Container(old_lineno=None,
                                             new_lineno=None,
                                             type='unknown',
                                             line=util.fixed_width(repr(line))))
    if chunk is not None:
        chunks.append(chunk)
    return chunks
708
def parse_delta(self, delta):
734
710
Return a nested data structure containing the changes in a delta::
743
text_changes: list((filename, file_id)),
745
repo = self._branch.repository
746
if (bzrlib.revision.is_null(old_revid) or
747
bzrlib.revision.is_null(new_revid)):
748
old_tree, new_tree = map(
749
repo.revision_tree, [old_revid, new_revid])
751
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
753
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
755
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
757
return util.Container(
758
added=sorted(reporter.added, key=lambda x: x.filename),
759
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
760
removed=sorted(reporter.removed, key=lambda x: x.filename),
761
modified=sorted(reporter.modified, key=lambda x: x.filename),
762
text_changes=sorted(reporter.text_changes, key=lambda x: x.filename))
725
for path, fid, kind in delta.added:
726
added.append((rich_filename(path, kind), fid))
728
for path, fid, kind, text_modified, meta_modified in delta.modified:
729
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
731
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
732
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
733
if meta_modified or text_modified:
734
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
736
for path, fid, kind in delta.removed:
737
removed.append((rich_filename(path, kind), fid))
739
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
742
def add_side_by_side(changes):
    """Attach side-by-side diff chunks to every modified file in `changes`."""
    # FIXME: this is a rotten API.
    for entry in changes:
        for modified_file in entry.changes.modified:
            modified_file.sbs_chunks = _make_side_by_side(modified_file.chunks)
748
def get_filelist(self, inv, file_id, sort_type=None):
750
return the list of all files (and their attributes) within a given
754
dir_ie = inv[file_id]
755
path = inv.id2path(file_id)
760
for filename, entry in dir_ie.children.iteritems():
761
revid_set.add(entry.revision)
764
for change in self.get_changes(list(revid_set)):
765
change_dict[change.revid] = change
767
for filename, entry in dir_ie.children.iteritems():
769
if entry.kind == 'directory':
772
revid = entry.revision
774
file = util.Container(
775
filename=filename, executable=entry.executable, kind=entry.kind,
776
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
777
revid=revid, change=change_dict[revid])
778
file_list.append(file)
780
if sort_type == 'filename' or sort_type is None:
781
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
782
elif sort_type == 'size':
783
file_list.sort(key=lambda x: x.size)
784
elif sort_type == 'date':
785
file_list.sort(key=lambda x: x.change.date)
787
# Always sort by kind to get directories first
788
file_list.sort(key=lambda x: x.kind != 'directory')
791
for file in file_list:
798
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
800
def annotate_file(self, file_id, revid):
805
file_revid = self.get_inventory(revid)[file_id].revision
807
tree = self._branch.repository.revision_tree(file_revid)
810
for line_revid, text in tree.annotate_iter(file_id):
811
revid_set.add(line_revid)
812
if self._BADCHARS_RE.match(text):
813
# bail out; this isn't displayable text
814
yield util.Container(parity=0, lineno=1, status='same',
815
text='(This is a binary file.)',
816
change=util.Container())
818
change_cache = dict([(c.revid, c) \
819
for c in self.get_changes(list(revid_set))])
821
last_line_revid = None
822
for line_revid, text in tree.annotate_iter(file_id):
823
if line_revid == last_line_revid:
824
# remember which lines have a new revno and which don't
829
last_line_revid = line_revid
830
change = change_cache[line_revid]
831
trunc_revno = change.revno
832
if len(trunc_revno) > 10:
833
trunc_revno = trunc_revno[:9] + '...'
835
yield util.Container(parity=parity, lineno=lineno, status=status,
836
change=change, text=util.fixed_width(text))
839
self.log.debug('annotate: %r secs' % (time.time() - z,))