62
82
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair queued delete/insert lines into side-by-side rows.

    Pads the shorter of *delete_list*/*insert_list* with blank 'context'
    placeholders so both are the same length, then appends one
    util.Container row per pair to *line_list*.  Both buffers are left
    empty afterwards, matching the original draining behaviour.

    Each buffer entry is a ``(lineno, line_text, type)`` tuple.
    """
    # Pad the shorter buffer so the two columns line up.
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    # Pair rows front to front.  zip() replaces the original
    # pop(0)-in-a-loop, which shifted the whole list each iteration
    # (O(n^2) overall) for no benefit.
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Preserve the original contract: the buffers are consumed.
    del delete_list[:]
    del insert_list[:]
78
def _make_side_by_side(chunk_list):
80
turn a normal unified-style diff (post-processed by parse_delta) into a
81
side-by-side diff structure. the new structure is::
89
type: str('context' or 'changed'),
94
for chunk in chunk_list:
97
delete_list, insert_list = [], []
98
for line in chunk.diff:
99
# Add <wbr/> every X characters so we can wrap properly
100
wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
101
wrap_lines = [util.html_clean(_line) for _line in wrap_line]
102
wrapped_line = wrap_char.join(wrap_lines)
104
if line.type == 'context':
105
if len(delete_list) or len(insert_list):
106
_process_side_by_side_buffers(line_list, delete_list,
108
delete_list, insert_list = [], []
109
line_list.append(util.Container(old_lineno=line.old_lineno,
110
new_lineno=line.new_lineno,
111
old_line=wrapped_line,
112
new_line=wrapped_line,
115
elif line.type == 'delete':
116
delete_list.append((line.old_lineno, wrapped_line, line.type))
117
elif line.type == 'insert':
118
insert_list.append((line.new_lineno, wrapped_line, line.type))
119
if len(delete_list) or len(insert_list):
120
_process_side_by_side_buffers(line_list, delete_list, insert_list)
121
out_chunk_list.append(util.Container(diff=line_list))
122
return out_chunk_list
125
def is_branch(folder):
    # NOTE(review): this copy appears truncated -- the stray "127" below
    # looks like a pasted line number, and the result of Branch.open() is
    # discarded with no try/except or return visible.  Presumably this
    # should report whether *folder* opens as a branch; confirm against
    # upstream loggerhead before relying on it.
127
bzrlib.branch.Branch.open(folder)
133
def clean_message(message):
    """Clean up a commit message and return it and a short (1-line) version.

    Commit messages that are long single lines are reflowed using the textwrap
    module (Robey, the original author of this code, apparently favored this
    style of formatting).

    :param message: the raw commit message string.
    :return: ``(message_lines, short_message)`` where *message_lines* is a
        list of lines and *short_message* is the first line, truncated to
        60 characters plus ``'...'`` when longer.
    """
    message = message.splitlines()

    if len(message) == 1:
        # Reflow a single long line into wrapped lines.
        message = textwrap.wrap(message[0])

    if len(message) == 0:
        # We can end up here when (a) the commit message was empty or (b)
        # when the message consisted entirely of whitespace, in which case
        # textwrap.wrap() returns an empty list.
        # Without this fallback, message[0] below would raise IndexError.
        message = ['']

    # Make short form of commit message.
    short_message = message[0]
    if len(short_message) > 60:
        short_message = short_message[:60] + '...'

    return message, short_message
159
def rich_filename(path, kind):
    # NOTE(review): truncated copy -- the bodies of both conditionals and
    # the final return are missing here.  Presumably directories and
    # symlinks get a suffix appended to *path* ('/' and '@' respectively --
    # TODO confirm against upstream loggerhead).
160
if kind == 'directory':
162
if kind == 'symlink':
169
86
class _RevListToTimestamps(object):
170
87
"""This takes a list of revisions, and allows you to bisect by date"""
186
103
class History (object):
187
"""Decorate a branch to provide information for rendering.
189
History objects are expected to be short lived -- when serving a request
190
for a particular branch, open it, read-lock it, wrap a History object
191
around it, serve the request, throw the History object away, unlock the
192
branch and throw it away.
194
:ivar _file_change_cache: xx
197
def __init__(self, branch, whole_history_data_cache):
198
assert branch.is_locked(), (
199
"Can only construct a History object with a read-locked branch.")
200
self._file_change_cache = None
106
self._change_cache = None
108
self._lock = threading.RLock()
111
def from_branch(cls, branch, name=None):
201
114
self._branch = branch
202
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
204
self.last_revid = branch.last_revision()
206
whole_history_data = whole_history_data_cache.get(self.last_revid)
207
if whole_history_data is None:
208
whole_history_data = compute_whole_history_data(branch)
209
whole_history_data_cache[self.last_revid] = whole_history_data
211
(self._revision_graph, self._full_history, self._revision_info,
212
self._revno_revid, self._merge_sort, self._where_merged
213
) = whole_history_data
215
def use_file_cache(self, cache):
    """Install *cache* as this History's file-change cache.

    Later file-change lookups consult it before falling back to the
    uncached computation.
    """
216
self._file_change_cache = cache
219
def has_revisions(self):
    """Report whether the wrapped branch contains any revisions at all."""
    branch_is_empty = bzrlib.revision.is_null(self.last_revid)
    return not branch_is_empty
222
def get_config(self):
    """Return the configuration object of the underlying branch."""
    branch = self._branch
    return branch.get_config()
115
self._history = branch.revision_history()
116
self._last_revid = self._history[-1]
117
self._revision_graph = branch.repository.get_revision_graph(self._last_revid)
120
name = self._branch.nick
122
self.log = logging.getLogger('loggerhead.%s' % (name,))
124
self._full_history = []
125
self._revision_info = {}
126
self._revno_revid = {}
127
self._merge_sort = bzrlib.tsort.merge_sort(self._revision_graph, self._last_revid, generate_revno=True)
129
for (seq, revid, merge_depth, revno, end_of_merge) in self._merge_sort:
130
self._full_history.append(revid)
131
revno_str = '.'.join(str(n) for n in revno)
132
self._revno_revid[revno_str] = revid
133
self._revision_info[revid] = (seq, revid, merge_depth, revno_str, end_of_merge)
138
self._where_merged = {}
139
for revid in self._revision_graph.keys():
140
if not revid in self._full_history:
142
for parent in self._revision_graph[revid]:
143
self._where_merged.setdefault(parent, set()).add(revid)
145
self.log.info('built revision graph cache: %r secs' % (time.time() - z,))
149
def from_folder(cls, path, name=None):
150
b = bzrlib.branch.Branch.open(path)
151
return cls.from_branch(b, name)
154
def out_of_date(self):
155
if self._branch.revision_history()[-1] != self._last_revid:
159
def use_cache(self, cache):
160
self._change_cache = cache
162
def use_search_index(self, index):
167
# called when a new history object needs to be created, because the
168
# branch history has changed. we need to immediately close and stop
169
# using our caches, because a new history object will be created to
170
# replace us, using the same cache files.
171
if self._change_cache is not None:
172
self._change_cache.close()
173
self._change_cache = None
174
if self._index is not None:
178
def flush_cache(self):
179
if self._change_cache is None:
181
self._change_cache.flush()
183
def check_rebuild(self):
184
if self._change_cache is not None:
185
self._change_cache.check_rebuild()
186
if self._index is not None:
187
self._index.check_rebuild()
189
last_revid = property(lambda self: self._last_revid, None, None)
191
count = property(lambda self: self._count, None, None)
194
def get_revision(self, revid):
195
return self._branch.repository.get_revision(revid)
225
197
def get_revno(self, revid):
226
198
if revid not in self._revision_info:
229
201
seq, revid, merge_depth, revno_str, end_of_merge = self._revision_info[revid]
232
def get_revids_from(self, revid_list, start_revid):
234
Yield the mainline (wrt start_revid) revisions that merged each
237
if revid_list is None:
238
revid_list = self._full_history
239
revid_set = set(revid_list)
241
def introduced_revisions(revid):
243
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
245
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
246
r.add(self._merge_sort[i][1])
250
if bzrlib.revision.is_null(revid):
252
if introduced_revisions(revid) & revid_set:
204
def get_sequence(self, revid):
205
seq, revid, merge_depth, revno_str, end_of_merge = self._revision_info[revid]
208
def get_revision_history(self):
209
return self._full_history
211
def get_revid_sequence(self, revid_list, revid):
213
given a list of revision ids, return the sequence # of this revid in
222
def get_revids_from(self, revid_list, revid):
224
given a list of revision ids, yield revisions in graph order,
225
starting from revid. the list can be None if you just want to travel
226
across all revisions.
229
if (revid_list is None) or (revid in revid_list):
231
if not self._revision_graph.has_key(revid):
254
233
parents = self._revision_graph[revid]
255
234
if len(parents) == 0:
257
236
revid = parents[0]
259
239
def get_short_revision_history_by_fileid(self, file_id):
240
# wow. is this really the only way we can get this list? by
241
# man-handling the weave store directly? :-0
260
242
# FIXME: would be awesome if we could get, for a folder, the list of
261
# revisions where items within that folder changed.i
263
# FIXME: Workaround for bzr versions prior to 1.6b3.
264
# Remove me eventually pretty please :)
265
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
266
w_revids = w.versions()
267
revids = [r for r in self._full_history if r in w_revids]
268
except AttributeError:
269
possible_keys = [(file_id, revid) for revid in self._full_history]
270
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
271
revids = [revid for _, revid in existing_keys.iterkeys()]
243
# revisions where items within that folder changed.
244
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
245
w_revids = w.versions()
246
revids = [r for r in self._full_history if r in w_revids]
274
250
def get_revision_history_since(self, revid_list, date):
275
251
# if a user asks for revisions starting at 01-sep, they mean inclusive,
276
252
# so start at midnight on 02-sep.
337
342
# if a "revid" is actually a dotted revno, convert it to a revid
338
343
if revid is None:
341
return self.last_revid
342
345
if self.revno_re.match(revid):
343
346
revid = self._revno_revid[revid]
346
350
def get_file_view(self, revid, file_id):
348
Given a revid and optional path, return a (revlist, revid) for
349
navigation through the current scope: from the revid (or the latest
350
revision) back to the original revision.
352
Given an optional revid and optional path, return a (revlist, revid)
353
for navigation through the current scope: from the revid (or the
354
latest revision) back to the original revision.
352
356
If file_id is None, the entire revision history is the list scope.
357
If revid is None, the latest revision is used.
354
359
if revid is None:
355
revid = self.last_revid
360
revid = self._last_revid
356
361
if file_id is not None:
357
# since revid is 'start_revid', possibly should start the path
358
# tracing from revid... FIXME
362
# since revid is 'start_revid', possibly should start the path tracing from revid... FIXME
363
inv = self._branch.repository.get_revision_inventory(revid)
359
364
revlist = list(self.get_short_revision_history_by_fileid(file_id))
360
365
revlist = list(self.get_revids_from(revlist, revid))
362
367
revlist = list(self.get_revids_from(None, revid))
370
return revlist, revid
365
373
def get_view(self, revid, start_revid, file_id, query=None):
367
375
use the URL parameters (revid, start_revid, file_id, and query) to
368
376
determine the revision list we're viewing (start_revid, file_id, query)
369
377
and where we are in it (revid).
371
- if a query is given, we're viewing query results.
372
- if a file_id is given, we're viewing revisions for a specific
374
- if a start_revid is given, we're viewing the branch from a
375
specific revision up the tree.
377
these may be combined to view revisions for a specific file, from
378
a specific revision, with a specific search query.
380
returns a new (revid, start_revid, revid_list) where:
379
if a query is given, we're viewing query results.
380
if a file_id is given, we're viewing revisions for a specific file.
381
if a start_revid is given, we're viewing the branch from a
382
specific revision up the tree.
383
(these may be combined to view revisions for a specific file, from
384
a specific revision, with a specific search query.)
386
returns a new (revid, start_revid, revid_list, scan_list) where:
382
388
- revid: current position within the view
383
389
- start_revid: starting revision of this view
384
390
- revid_list: list of revision ids for this view
386
392
file_id and query are never changed so aren't returned, but they may
387
393
contain vital context for future url navigation.
389
if start_revid is None:
390
start_revid = self.last_revid
392
395
if query is None:
393
revid_list = self.get_file_view(start_revid, file_id)
396
revid_list, start_revid = self.get_file_view(start_revid, file_id)
394
397
if revid is None:
395
398
revid = start_revid
396
399
if revid not in revid_list:
397
400
# if the given revid is not in the revlist, use a revlist that
398
401
# starts at the given revid.
399
revid_list = self.get_file_view(revid, file_id)
402
revid_list, start_revid = self.get_file_view(revid, file_id)
401
403
return revid, start_revid, revid_list
403
405
# potentially limit the search
404
if file_id is not None:
405
revid_list = self.get_file_view(start_revid, file_id)
406
if (start_revid is not None) or (file_id is not None):
407
revid_list, start_revid = self.get_file_view(start_revid, file_id)
407
409
revid_list = None
408
revid_list = search.search_revisions(self._branch, query)
409
if revid_list and len(revid_list) > 0:
411
revid_list = self.get_search_revid_list(query, revid_list)
412
if len(revid_list) > 0:
410
413
if revid not in revid_list:
411
414
revid = revid_list[0]
412
415
return revid, start_revid, revid_list
414
# XXX: This should return a message saying that the search could
415
# not be completed due to either missing the plugin or missing a
417
418
return None, None, []
419
421
def get_inventory(self, revid):
420
422
return self._branch.repository.get_revision_inventory(revid)
422
425
def get_path(self, revid, file_id):
423
426
if (file_id is None) or (file_id == ''):
493
497
p_changes = self.get_changes(list(fetch_set))
494
498
p_change_dict = dict([(c.revid, c) for c in p_changes])
495
499
for change in changes:
496
# arch-converted branches may not have merged branch info :(
497
500
for p in change.parents:
498
if p.revid in p_change_dict:
499
p.branch_nick = p_change_dict[p.revid].branch_nick
501
p.branch_nick = '(missing)'
501
p.branch_nick = p_change_dict[p.revid].branch_nick
502
502
for p in change.merge_points:
503
if p.revid in p_change_dict:
504
p.branch_nick = p_change_dict[p.revid].branch_nick
506
p.branch_nick = '(missing)'
508
def get_changes(self, revid_list):
509
"""Return a list of changes objects for the given revids.
511
Revisions not present and NULL_REVISION will be ignored.
513
changes = self.get_changes_uncached(revid_list)
514
if len(changes) == 0:
503
p.branch_nick = p_change_dict[p.revid].branch_nick
506
def get_changes(self, revid_list, get_diffs=False):
507
if self._change_cache is None:
508
changes = self.get_changes_uncached(revid_list, get_diffs)
510
changes = self._change_cache.get_changes(revid_list, get_diffs)
517
514
# some data needs to be recalculated each time, because it may
518
515
# change as new revisions are added.
519
for change in changes:
520
merge_revids = self.simplify_merge_point_list(self.get_merge_point_list(change.revid))
516
for i in xrange(len(revid_list)):
517
revid = revid_list[i]
519
merge_revids = self.simplify_merge_point_list(self.get_merge_point_list(revid))
521
520
change.merge_points = [util.Container(revid=r, revno=self.get_revno(r)) for r in merge_revids]
522
if len(change.parents) > 0:
523
change.parents = [util.Container(revid=r,
524
revno=self.get_revno(r)) for r in change.parents]
525
change.revno = self.get_revno(change.revid)
528
for change in changes:
529
change.parity = parity
534
def get_changes_uncached(self, revid_list):
535
# FIXME: deprecated method in getting a null revision
536
revid_list = filter(lambda revid: not bzrlib.revision.is_null(revid),
538
parent_map = self._branch.repository.get_graph().get_parent_map(revid_list)
539
# We need to return the answer in the same order as the input,
541
present_revids = [revid for revid in revid_list
542
if revid in parent_map]
543
rev_list = self._branch.repository.get_revisions(present_revids)
545
return [self._change_from_revision(rev) for rev in rev_list]
547
def _get_deltas_for_revisions_with_trees(self, revisions):
548
"""Produce a list of revision deltas.
550
Note that the input is a sequence of REVISIONS, not revision_ids.
551
Trees will be held in memory until the generator exits.
552
Each delta is relative to the revision's lefthand predecessor.
553
(This is copied from bzrlib.)
555
required_trees = set()
556
for revision in revisions:
557
required_trees.add(revision.revid)
558
required_trees.update([p.revid for p in revision.parents[:1]])
559
trees = dict((t.get_revision_id(), t) for
560
t in self._branch.repository.revision_trees(required_trees))
562
self._branch.repository.lock_read()
524
# alright, let's profile this sucka.
525
def _get_changes_profiled(self, revid_list, get_diffs=False):
526
from loggerhead.lsprof import profile
528
ret, stats = profile(self.get_changes_uncached, revid_list, get_diffs)
531
cPickle.dump(stats, open('lsprof.stats', 'w'), 2)
535
@with_bzrlib_read_lock
536
def get_changes_uncached(self, revid_list, get_diffs=False):
564
for revision in revisions:
565
if not revision.parents:
566
old_tree = self._branch.repository.revision_tree(
567
bzrlib.revision.NULL_REVISION)
569
old_tree = trees[revision.parents[0].revid]
570
tree = trees[revision.revid]
571
ret.append(tree.changes_from(old_tree))
574
self._branch.repository.unlock()
576
def _change_from_revision(self, revision):
578
Given a bzrlib Revision, return a processed "change" for use in
581
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
583
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
585
message, short_message = clean_message(revision.message)
588
'revid': revision.revision_id,
590
'author': revision.get_apparent_author(),
591
'branch_nick': revision.properties.get('branch-nick', None),
592
'short_comment': short_message,
593
'comment': revision.message,
594
'comment_clean': [util.html_clean(s) for s in message],
595
'parents': revision.parent_ids,
597
return util.Container(entry)
599
def get_file_changes_uncached(self, entries):
    """Compute parsed deltas for each revision entry, bypassing the cache."""
    deltas = self._get_deltas_for_revisions_with_trees(entries)
    return [self.parse_delta(d) for d in deltas]
604
def get_file_changes(self, entries):
    """Return file-change data for *entries*, using the cache when one is set."""
    cache = self._file_change_cache
    if cache is not None:
        return cache.get_file_changes(entries)
    return self.get_file_changes_uncached(entries)
610
def add_changes(self, entries):
    """Fetch file changes for *entries* and attach each result to its entry."""
    per_entry_changes = self.get_file_changes(entries)
    for entry, entry_changes in zip(entries, per_entry_changes):
        entry.changes = entry_changes
616
def get_change_with_diff(self, revid, compare_revid=None):
617
change = self.get_changes([revid])[0]
619
if compare_revid is None:
621
compare_revid = change.parents[0].revid
538
rev_list = self._branch.repository.get_revisions(revid_list)
539
except (KeyError, bzrlib.errors.NoSuchRevision):
542
delta_list = self._branch.repository.get_deltas_for_revisions(rev_list)
543
combined_list = zip(rev_list, delta_list)
547
# lookup the trees for each revision, so we can calculate diffs
550
lookup_set.add(rev.revision_id)
551
if len(rev.parent_ids) > 0:
552
lookup_set.add(rev.parent_ids[0])
553
tree_map = dict((t.get_revision_id(), t) for t in self._branch.repository.revision_trees(lookup_set))
554
# also the root tree, in case we hit the origin:
555
tree_map[None] = self._branch.repository.revision_tree(None)
558
for rev, delta in combined_list:
559
commit_time = datetime.datetime.fromtimestamp(rev.timestamp)
561
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in rev.parent_ids]
563
if len(parents) == 0:
623
compare_revid = 'null:'
625
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
626
rev_tree2 = self._branch.repository.revision_tree(revid)
627
delta = rev_tree2.changes_from(rev_tree1)
629
change.changes = self.parse_delta(delta)
630
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
566
left_parent = rev.parent_ids[0]
568
message = rev.message.splitlines()
569
if len(message) == 1:
570
# robey-style 1-line long message
571
message = textwrap.wrap(message[0])
573
# make short form of commit message
574
short_message = message[0]
575
if len(short_message) > 60:
576
short_message = short_message[:60] + '...'
578
old_tree, new_tree = None, None
580
new_tree = tree_map[rev.revision_id]
581
old_tree = tree_map[left_parent]
584
'revid': rev.revision_id,
585
'revno': self.get_revno(rev.revision_id),
587
'author': rev.committer,
588
'branch_nick': rev.properties.get('branch-nick', None),
589
'short_comment': short_message,
590
'comment': rev.message,
591
'comment_clean': [util.html_clean(s) for s in message],
593
'changes': self.parse_delta(delta, get_diffs, old_tree, new_tree),
595
entries.append(util.Container(entry))
634
600
def get_file(self, file_id, revid):
635
"returns (path, filename, data)"
636
inv = self.get_inventory(revid)
637
inv_entry = inv[file_id]
601
"returns (filename, data)"
602
inv_entry = self.get_inventory(revid)[file_id]
638
603
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
639
path = inv.id2path(file_id)
640
if not path.startswith('/'):
642
return path, inv_entry.name, rev_tree.get_file_text(file_id)
644
def _parse_diffs(self, old_tree, new_tree, delta):
604
return inv_entry.name, rev_tree.get_file_text(file_id)
607
def parse_delta(self, delta, get_diffs=True, old_tree=None, new_tree=None):
646
Return a list of processed diffs, in the format::
609
Return a nested data structure containing the changes in a delta::
611
added: list((filename, file_id)),
612
renamed: list((old_filename, new_filename, file_id)),
613
deleted: list((filename, file_id)),
664
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
666
process.append((old_path, new_path, fid, kind))
667
for path, fid, kind, text_modified, meta_modified in delta.modified:
668
process.append((path, path, fid, kind))
670
for old_path, new_path, fid, kind in process:
671
old_lines = old_tree.get_file_lines(fid)
672
new_lines = new_tree.get_file_lines(fid)
674
if old_lines != new_lines:
676
bzrlib.diff.internal_diff(old_path, old_lines,
677
new_path, new_lines, buffer)
678
except bzrlib.errors.BinaryFile:
681
diff = buffer.getvalue()
684
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
688
def _process_diff(self, diff):
689
# doesn't really need to be a method; could be static.
692
for line in diff.splitlines():
695
if line.startswith('+++ ') or line.startswith('--- '):
697
if line.startswith('@@ '):
699
if chunk is not None:
701
chunk = util.Container()
703
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
704
old_lineno = lines[0]
705
new_lineno = lines[1]
706
elif line.startswith(' '):
707
chunk.diff.append(util.Container(old_lineno=old_lineno,
708
new_lineno=new_lineno,
713
elif line.startswith('+'):
714
chunk.diff.append(util.Container(old_lineno=None,
715
new_lineno=new_lineno,
716
type='insert', line=line[1:]))
718
elif line.startswith('-'):
719
chunk.diff.append(util.Container(old_lineno=old_lineno,
721
type='delete', line=line[1:]))
724
chunk.diff.append(util.Container(old_lineno=None,
728
if chunk is not None:
732
def parse_delta(self, delta):
734
Return a nested data structure containing the changes in a delta::
736
added: list((filename, file_id)),
737
renamed: list((old_filename, new_filename, file_id)),
738
deleted: list((filename, file_id)),
627
if C{get_diffs} is false, the C{chunks} will be omitted.
634
def rich_filename(path, kind):
635
if kind == 'directory':
637
if kind == 'symlink':
641
def process_diff(diff):
644
for line in diff.splitlines():
647
if line.startswith('+++ ') or line.startswith('--- '):
649
if line.startswith('@@ '):
651
if chunk is not None:
653
chunk = util.Container()
655
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
656
old_lineno = lines[0]
657
new_lineno = lines[1]
658
elif line.startswith(' '):
659
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=new_lineno,
660
type='context', line=util.html_clean(line[1:])))
663
elif line.startswith('+'):
664
chunk.diff.append(util.Container(old_lineno=None, new_lineno=new_lineno,
665
type='insert', line=util.html_clean(line[1:])))
667
elif line.startswith('-'):
668
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=None,
669
type='delete', line=util.html_clean(line[1:])))
672
chunk.diff.append(util.Container(old_lineno=None, new_lineno=None,
673
type='unknown', line=util.html_clean(repr(line))))
674
if chunk is not None:
678
def handle_modify(old_path, new_path, fid, kind):
680
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
682
old_lines = old_tree.get_file_lines(fid)
683
new_lines = new_tree.get_file_lines(fid)
685
bzrlib.diff.internal_diff(old_path, old_lines, new_path, new_lines, buffer)
686
diff = buffer.getvalue()
687
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=process_diff(diff), raw_diff=diff))
749
689
for path, fid, kind in delta.added:
750
690
added.append((rich_filename(path, kind), fid))
752
692
for path, fid, kind, text_modified, meta_modified in delta.modified:
753
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
755
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
756
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
693
handle_modify(path, path, fid, kind)
695
for oldpath, newpath, fid, kind, text_modified, meta_modified in delta.renamed:
696
renamed.append((rich_filename(oldpath, kind), rich_filename(newpath, kind), fid))
757
697
if meta_modified or text_modified:
758
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
698
handle_modify(oldpath, newpath, fid, kind)
760
700
for path, fid, kind in delta.removed:
761
701
removed.append((rich_filename(path, kind), fid))
763
703
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
766
def add_side_by_side(changes):
    """Attach side-by-side chunk data to every modified file in *changes*.

    For each change, every entry in ``change.changes.modified`` gains an
    ``sbs_chunks`` attribute computed from its ``chunks``.
    """
    # FIXME: this is a rotten API.
    for entry in changes:
        for modified_file in entry.changes.modified:
            modified_file.sbs_chunks = _make_side_by_side(modified_file.chunks)
772
def get_filelist(self, inv, file_id, sort_type=None):
706
def get_filelist(self, inv, path, sort_type=None):
774
708
return the list of all files (and their attributes) within a given
778
dir_ie = inv[file_id]
779
path = inv.id2path(file_id)
711
while path.endswith('/'):
713
if path.startswith('/'):
716
entries = inv.entries()
719
for filepath, entry in entries:
720
fetch_set.add(entry.revision)
721
change_dict = dict([(c.revid, c) for c in self.get_changes(list(fetch_set))])
784
for filename, entry in dir_ie.children.iteritems():
785
revid_set.add(entry.revision)
788
for change in self.get_changes(list(revid_set)):
789
change_dict[change.revid] = change
791
for filename, entry in dir_ie.children.iteritems():
724
for filepath, entry in entries:
725
if posixpath.dirname(filepath) != path:
727
filename = posixpath.basename(filepath)
728
rich_filename = filename
792
729
pathname = filename
793
730
if entry.kind == 'directory':
796
734
revid = entry.revision
798
file = util.Container(
799
filename=filename, executable=entry.executable, kind=entry.kind,
800
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
801
revid=revid, change=change_dict[revid])
735
change = change_dict[revid]
737
file = util.Container(filename=filename, rich_filename=rich_filename, executable=entry.executable, kind=entry.kind,
738
pathname=pathname, file_id=entry.file_id, size=entry.text_size, revid=revid, change=change)
802
739
file_list.append(file)
804
if sort_type == 'filename' or sort_type is None:
805
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
741
if sort_type == 'filename':
742
file_list.sort(key=lambda x: x.filename)
806
743
elif sort_type == 'size':
807
744
file_list.sort(key=lambda x: x.size)
808
745
elif sort_type == 'date':
809
746
file_list.sort(key=lambda x: x.change.date)
811
# Always sort by kind to get directories first
812
file_list.sort(key=lambda x: x.kind != 'directory')
815
749
for file in file_list:
816
750
file.parity = parity