38
from bzrlib import tag
36
from StringIO import StringIO
38
from loggerhead import search
39
from loggerhead import util
40
from loggerhead.wholehistory import compute_whole_history_data
39
43
import bzrlib.branch
41
45
import bzrlib.errors
46
import bzrlib.progress
43
47
import bzrlib.revision
45
from loggerhead import search
46
from loggerhead import util
47
from loggerhead.wholehistory import compute_whole_history_data
51
# bzrlib's UIFactory is not thread-safe, so each thread gets its own
# progress-bar stack via a thread-local.
uihack = threading.local()


class ThreadSafeUIFactory(bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack is per-thread.

    bzrlib keeps a single global UI factory; storing the progress bar
    stack in a ``threading.local`` lets multiple request threads use it
    concurrently without stepping on each other's state.
    """

    def nested_progress_bar(self):
        # Lazily create this thread's stack of (dummy) progress bars.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()

# Install the thread-safe factory as the process-wide bzrlib UI factory.
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
63
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair up buffered delete/insert lines into side-by-side rows.

    Pads the shorter of `delete_list`/`insert_list` with empty 'context'
    placeholders so the two columns line up, then appends one Container
    per row to `line_list`.  Both buffers are consumed (emptied) in the
    process.

    Each buffer entry is a (lineno, line, type) tuple.
    """
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    while len(delete_list) > 0:
        d = delete_list.pop(0)
        i = insert_list.pop(0)
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
76
def _make_side_by_side(chunk_list):
    """
    Turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  The new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                old_type: str('context' or 'changed'),
                new_type: str('context' or 'changed'),
            )
        )
    """
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        # Buffers for runs of consecutive delete/insert lines; they are
        # flushed into paired rows whenever a context line is reached.
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(
                    old_lineno=line.old_lineno, new_lineno=line.new_lineno,
                    old_line=line.line, new_line=line.line,
                    old_type=line.type, new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        # Flush any trailing run of changed lines at the end of the chunk.
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
50
113
def is_branch(folder):
101
166
def __getitem__(self, index):
    """Get the commit date of the index'd revision.

    Looks up the revision id at `index` in self.revid_list and returns
    its commit timestamp as a naive datetime (local time).
    """
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
106
170
def __len__(self):
    """Return the number of revisions in the wrapped revid list."""
    return len(self.revid_list)
109
class FileChangeReporter(object):
    """Collect per-file change information from a tree delta.

    Instances are passed to bzrlib.delta.report_changes() as the reporter;
    each report() call sorts the file into one of the added/removed/
    renamed/modified buckets and records text changes with the old and new
    file revisions.
    """

    def __init__(self, old_inv, new_inv):
        # Buckets filled in by report(); each holds util.Container entries.
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        """Return the revision of `file_id` in `inv`, or 'null:' if absent."""
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        """Record one file change (callback for report_changes)."""
        if modified not in ('unchanged', 'kind changed'):
            # The file's text changed; pick the surviving path for display.
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind), kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind), kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                text_modified=modified == 'modified', exe_change=exe_change))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                text_modified=modified == 'modified', exe_change=exe_change))
153
# The lru_cache is not thread-safe, so we need a lock around it for
# all accesses (see RevInfoMemoryCache below).
rev_info_memory_cache_lock = threading.RLock()
157
class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # `cache` is a dict-like object (e.g. an LRU cache) shared across
        # threads; all access goes through rev_info_memory_cache_lock.
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        rev_info_memory_cache_lock.acquire()
        try:
            cached = self._cache.get(key)
        finally:
            rev_info_memory_cache_lock.release()
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get().
        """
        rev_info_memory_cache_lock.acquire()
        try:
            self._cache[key] = (revid, data)
        finally:
            rev_info_memory_cache_lock.release()
201
# Used to store locks that prevent multiple threads from building a
# revision graph for the same branch at the same time, because that can
# cause severe performance issues that are so bad that the system seems
# to hang.  Maps cache_key -> threading.Lock; guarded by the check lock.
revision_graph_locks = {}
revision_graph_check_lock = threading.Lock()
208
class History(object):
174
class History (object):
209
175
"""Decorate a branch to provide information for rendering.
211
177
History objects are expected to be short lived -- when serving a request
213
179
around it, serve the request, throw the History object away, unlock the
214
180
branch and throw it away.
216
:ivar _rev_info: A list of information about revisions. This is by far
217
the most cryptic data structure in loggerhead. At the top level, it
218
is a list of 3-tuples [(merge-info, where-merged, parents)].
219
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
220
like a merged sorted list, but the revno is stringified.
221
`where-merged` is a tuple of revisions that have this revision as a
222
non-lefthand parent. Finally, `parents` is just the usual list of
223
parents of this revision.
224
:ivar _rev_indices: A dictionary mapping each revision id to the index of
225
the information about it in _rev_info.
226
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
182
:ivar _file_change_cache: xx
230
def _load_whole_history_data(self, caches, cache_key):
231
"""Set the attributes relating to the whole history of the branch.
233
:param caches: a list of caches with interfaces like
234
`RevInfoMemoryCache` and be ordered from fastest to slowest.
235
:param cache_key: the key to use with the caches.
237
self._rev_indices = None
238
self._rev_info = None
241
def update_missed_caches():
242
for cache in missed_caches:
243
cache.set(cache_key, self.last_revid, self._rev_info)
245
# Theoretically, it's possible for two threads to race in creating
246
# the Lock() object for their branch, so we put a lock around
247
# creating the per-branch Lock().
248
revision_graph_check_lock.acquire()
250
if cache_key not in revision_graph_locks:
251
revision_graph_locks[cache_key] = threading.Lock()
253
revision_graph_check_lock.release()
255
revision_graph_locks[cache_key].acquire()
258
data = cache.get(cache_key, self.last_revid)
260
self._rev_info = data
261
update_missed_caches()
264
missed_caches.append(cache)
266
whole_history_data = compute_whole_history_data(self._branch)
267
self._rev_info, self._rev_indices = whole_history_data
268
update_missed_caches()
270
revision_graph_locks[cache_key].release()
272
if self._rev_indices is not None:
273
self._revno_revid = {}
274
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
275
self._revno_revid[revno_str] = revid
277
self._revno_revid = {}
278
self._rev_indices = {}
279
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
280
self._rev_indices[revid] = seq
281
self._revno_revid[revno_str] = revid
283
def __init__(self, branch, whole_history_data_cache,
284
revinfo_disk_cache=None, cache_key=None):
185
def __init__(self, branch, whole_history_data_cache):
285
186
assert branch.is_locked(), (
286
187
"Can only construct a History object with a read-locked branch.")
188
self._file_change_cache = None
287
189
self._branch = branch
288
self._branch_tags = None
289
self._inventory_cache = {}
290
self._branch_nick = self._branch.get_config().get_nickname()
291
self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))
190
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
293
192
self.last_revid = branch.last_revision()
295
caches = [RevInfoMemoryCache(whole_history_data_cache)]
296
if revinfo_disk_cache:
297
caches.append(revinfo_disk_cache)
298
self._load_whole_history_data(caches, cache_key)
194
whole_history_data = whole_history_data_cache.get(self.last_revid)
195
if whole_history_data is None:
196
whole_history_data = compute_whole_history_data(branch)
197
whole_history_data_cache[self.last_revid] = whole_history_data
199
(self._revision_graph, self._full_history, self._revision_info,
200
self._revno_revid, self._merge_sort, self._where_merged
201
) = whole_history_data
203
def use_file_cache(self, cache):
204
self._file_change_cache = cache
301
207
def has_revisions(self):
318
223
revid in revid_list.
320
225
if revid_list is None:
321
# Just yield the mainline, starting at start_revid
323
is_null = bzrlib.revision.is_null
324
while not is_null(revid):
326
parents = self._rev_info[self._rev_indices[revid]][2]
226
revid_list = self._full_history
331
227
revid_set = set(revid_list)
332
228
revid = start_revid
334
229
def introduced_revisions(revid):
336
seq = self._rev_indices[revid]
337
md = self._rev_info[seq][0][2]
231
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
339
while i < len(self._rev_info) and self._rev_info[i][0][2] > md:
340
r.add(self._rev_info[i][0][1])
233
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
234
r.add(self._merge_sort[i][1])
344
238
if bzrlib.revision.is_null(revid):
346
rev_introduced = introduced_revisions(revid)
347
matching = rev_introduced.intersection(revid_set)
349
# We don't need to look for these anymore.
350
revid_set.difference_update(matching)
240
if introduced_revisions(revid) & revid_set:
352
parents = self._rev_info[self._rev_indices[revid]][2]
242
parents = self._revision_graph[revid]
353
243
if len(parents) == 0:
355
245
revid = parents[0]
357
247
def get_short_revision_history_by_fileid(self, file_id):
248
# wow. is this really the only way we can get this list? by
249
# man-handling the weave store directly? :-0
358
250
# FIXME: would be awesome if we could get, for a folder, the list of
359
# revisions where items within that folder changed.i
360
possible_keys = [(file_id, revid) for revid in self._rev_indices]
361
get_parent_map = self._branch.repository.texts.get_parent_map
362
# We chunk the requests as this works better with GraphIndex.
363
# See _filter_revisions_touching_file_id in bzrlib/log.py
364
# for more information.
367
for start in xrange(0, len(possible_keys), chunk_size):
368
next_keys = possible_keys[start:start + chunk_size]
369
revids += [k[1] for k in get_parent_map(next_keys)]
370
del possible_keys, next_keys
251
# revisions where items within that folder changed.
252
possible_keys = [(file_id, revid) for revid in self._full_history]
253
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
254
return [revid for _, revid in existing_keys.iterkeys()]
373
256
def get_revision_history_since(self, revid_list, date):
374
257
# if a user asks for revisions starting at 01-sep, they mean inclusive,
375
258
# so start at midnight on 02-sep.
376
259
date = date + datetime.timedelta(days=1)
377
# our revid list is sorted in REVERSE date order,
378
# so go thru some hoops here...
260
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
379
261
revid_list.reverse()
380
index = bisect.bisect(_RevListToTimestamps(revid_list,
381
self._branch.repository),
262
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
385
265
revid_list.reverse()
401
280
# all the relevant changes (time-consuming) only to return a list of
402
281
# revids which will be used to fetch a set of changes again.
404
# if they entered a revid, just jump straight there;
405
# ignore the passed-in revid_list
283
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
406
284
revid = self.fix_revid(query)
407
285
if revid is not None:
408
286
if isinstance(revid, unicode):
409
287
revid = revid.encode('utf-8')
410
changes = self.get_changes([revid])
288
changes = self.get_changes([ revid ])
411
289
if (changes is not None) and (len(changes) > 0):
415
293
m = self.us_date_re.match(query)
416
294
if m is not None:
417
date = datetime.datetime(util.fix_year(int(m.group(3))),
295
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
421
297
m = self.earth_date_re.match(query)
422
298
if m is not None:
423
date = datetime.datetime(util.fix_year(int(m.group(3))),
299
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
427
301
m = self.iso_date_re.match(query)
428
302
if m is not None:
429
date = datetime.datetime(util.fix_year(int(m.group(1))),
303
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
432
304
if date is not None:
433
305
if revid_list is None:
434
# if no limit to the query was given,
435
# search only the direct-parent path.
306
# if no limit to the query was given, search only the direct-parent path.
436
307
revid_list = list(self.get_revids_from(None, self.last_revid))
437
308
return self.get_revision_history_since(revid_list, date)
468
336
if revid is None:
469
337
revid = self.last_revid
470
338
if file_id is not None:
472
self.get_short_revision_history_by_fileid(file_id))
473
revlist = self.get_revids_from(revlist, revid)
339
# since revid is 'start_revid', possibly should start the path
340
# tracing from revid... FIXME
341
revlist = list(self.get_short_revision_history_by_fileid(file_id))
342
revlist = list(self.get_revids_from(revlist, revid))
475
revlist = self.get_revids_from(None, revid)
344
revlist = list(self.get_revids_from(None, revid))
479
def _iterate_sufficiently(iterable, stop_at, extra_rev_count):
480
"""Return a list of iterable.
482
If extra_rev_count is None, fully consume iterable.
483
Otherwise, stop at 'stop_at' + extra_rev_count.
486
iterate until you find stop_at, then iterate 10 more times.
488
if extra_rev_count is None:
489
return list(iterable)
498
for count, n in enumerate(iterable):
499
if count >= extra_rev_count:
504
def get_view(self, revid, start_revid, file_id, query=None,
505
extra_rev_count=None):
347
def get_view(self, revid, start_revid, file_id, query=None):
507
349
use the URL parameters (revid, start_revid, file_id, and query) to
508
350
determine the revision list we're viewing (start_revid, file_id, query)
621
452
revnol = revno.split(".")
622
453
revnos = ".".join(revnol[:-2])
623
454
revnolast = int(revnol[-1])
455
if d.has_key(revnos):
626
457
if revnolast < m:
627
d[revnos] = (revnolast, revid)
458
d[revnos] = ( revnolast, revid )
629
d[revnos] = (revnolast, revid)
631
return [revid for (_, revid) in d.itervalues()]
633
def add_branch_nicks(self, change):
460
d[revnos] = ( revnolast, revid )
462
return [ d[revnos][1] for revnos in d.keys() ]
464
def get_branch_nicks(self, changes):
635
given a 'change', fill in the branch nicks on all parents and merge
466
given a list of changes from L{get_changes}, fill in the branch nicks
467
on all parents and merge points.
638
469
fetch_set = set()
639
for p in change.parents:
640
fetch_set.add(p.revid)
641
for p in change.merge_points:
642
fetch_set.add(p.revid)
470
for change in changes:
471
for p in change.parents:
472
fetch_set.add(p.revid)
473
for p in change.merge_points:
474
fetch_set.add(p.revid)
643
475
p_changes = self.get_changes(list(fetch_set))
644
476
p_change_dict = dict([(c.revid, c) for c in p_changes])
645
for p in change.parents:
646
if p.revid in p_change_dict:
647
p.branch_nick = p_change_dict[p.revid].branch_nick
649
p.branch_nick = '(missing)'
650
for p in change.merge_points:
651
if p.revid in p_change_dict:
652
p.branch_nick = p_change_dict[p.revid].branch_nick
654
p.branch_nick = '(missing)'
477
for change in changes:
478
# arch-converted branches may not have merged branch info :(
479
for p in change.parents:
480
if p.revid in p_change_dict:
481
p.branch_nick = p_change_dict[p.revid].branch_nick
483
p.branch_nick = '(missing)'
484
for p in change.merge_points:
485
if p.revid in p_change_dict:
486
p.branch_nick = p_change_dict[p.revid].branch_nick
488
p.branch_nick = '(missing)'
656
490
def get_changes(self, revid_list):
657
491
"""Return a list of changes objects for the given revids.
697
527
return [self._change_from_revision(rev) for rev in rev_list]
529
def _get_deltas_for_revisions_with_trees(self, revisions):
530
"""Produce a list of revision deltas.
532
Note that the input is a sequence of REVISIONS, not revision_ids.
533
Trees will be held in memory until the generator exits.
534
Each delta is relative to the revision's lefthand predecessor.
535
(This is copied from bzrlib.)
537
required_trees = set()
538
for revision in revisions:
539
required_trees.add(revision.revid)
540
required_trees.update([p.revid for p in revision.parents[:1]])
541
trees = dict((t.get_revision_id(), t) for
542
t in self._branch.repository.revision_trees(required_trees))
544
self._branch.repository.lock_read()
546
for revision in revisions:
547
if not revision.parents:
548
old_tree = self._branch.repository.revision_tree(
549
bzrlib.revision.NULL_REVISION)
551
old_tree = trees[revision.parents[0].revid]
552
tree = trees[revision.revid]
553
ret.append(tree.changes_from(old_tree))
556
self._branch.repository.unlock()
699
558
def _change_from_revision(self, revision):
701
560
Given a bzrlib Revision, return a processed "change" for use in
563
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
565
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
704
567
message, short_message = clean_message(revision.message)
706
if self._branch_tags is None:
707
self._branch_tags = self._branch.tags.get_reverse_tag_dict()
710
if revision.revision_id in self._branch_tags:
711
# tag.sort_* functions expect (tag, data) pairs, so we generate them,
712
# and then strip them
713
tags = [(t, None) for t in self._branch_tags[revision.revision_id]]
714
sort_func = getattr(tag, 'sort_natural', None)
715
if sort_func is None:
718
sort_func(self._branch, tags)
719
revtags = u', '.join([t[0] for t in tags])
722
570
'revid': revision.revision_id,
723
'date': datetime.datetime.fromtimestamp(revision.timestamp),
724
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
725
'committer': revision.committer,
726
'authors': revision.get_apparent_authors(),
572
'author': revision.get_apparent_author(),
727
573
'branch_nick': revision.properties.get('branch-nick', None),
728
574
'short_comment': short_message,
729
575
'comment': revision.message,
730
576
'comment_clean': [util.html_clean(s) for s in message],
731
577
'parents': revision.parent_ids,
732
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
735
if isinstance(revision, bzrlib.foreign.ForeignRevision):
736
foreign_revid, mapping = (
737
revision.foreign_revid, revision.mapping)
738
elif ":" in revision.revision_id:
740
foreign_revid, mapping = \
741
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
742
revision.revision_id)
743
except bzrlib.errors.InvalidRevisionId:
748
if foreign_revid is not None:
749
entry["foreign_vcs"] = mapping.vcs.abbreviation
750
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
751
579
return util.Container(entry)
753
def get_file_changes(self, entry):
755
old_revid = entry.parents[0].revid
581
def get_file_changes_uncached(self, entries):
582
delta_list = self._get_deltas_for_revisions_with_trees(entries)
584
return [self.parse_delta(delta) for delta in delta_list]
586
def get_file_changes(self, entries):
587
if self._file_change_cache is None:
588
return self.get_file_changes_uncached(entries)
757
old_revid = bzrlib.revision.NULL_REVISION
758
return self.file_changes_for_revision_ids(old_revid, entry.revid)
760
def add_changes(self, entry):
761
changes = self.get_file_changes(entry)
762
entry.changes = changes
590
return self._file_change_cache.get_file_changes(entries)
592
def add_changes(self, entries):
593
changes_list = self.get_file_changes(entries)
595
for entry, changes in zip(entries, changes_list):
596
entry.changes = changes
598
def get_change_with_diff(self, revid, compare_revid=None):
599
change = self.get_changes([revid])[0]
601
if compare_revid is None:
603
compare_revid = change.parents[0].revid
605
compare_revid = 'null:'
607
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
608
rev_tree2 = self._branch.repository.revision_tree(revid)
609
delta = rev_tree2.changes_from(rev_tree1)
611
change.changes = self.parse_delta(delta)
612
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
764
616
def get_file(self, file_id, revid):
765
"""Returns (path, filename, file contents)"""
617
"returns (path, filename, data)"
766
618
inv = self.get_inventory(revid)
767
619
inv_entry = inv[file_id]
768
620
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
771
623
path = '/' + path
772
624
return path, inv_entry.name, rev_tree.get_file_text(file_id)
774
def file_changes_for_revision_ids(self, old_revid, new_revid):
626
def _parse_diffs(self, old_tree, new_tree, delta):
628
Return a list of processed diffs, in the format::
637
type: str('context', 'delete', or 'insert'),
646
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
648
process.append((old_path, new_path, fid, kind))
649
for path, fid, kind, text_modified, meta_modified in delta.modified:
650
process.append((path, path, fid, kind))
652
for old_path, new_path, fid, kind in process:
653
old_lines = old_tree.get_file_lines(fid)
654
new_lines = new_tree.get_file_lines(fid)
656
if old_lines != new_lines:
658
bzrlib.diff.internal_diff(old_path, old_lines,
659
new_path, new_lines, buffer)
660
except bzrlib.errors.BinaryFile:
663
diff = buffer.getvalue()
666
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
670
def _process_diff(self, diff):
671
# doesn't really need to be a method; could be static.
674
for line in diff.splitlines():
677
if line.startswith('+++ ') or line.startswith('--- '):
679
if line.startswith('@@ '):
681
if chunk is not None:
683
chunk = util.Container()
685
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
686
old_lineno = lines[0]
687
new_lineno = lines[1]
688
elif line.startswith(' '):
689
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=new_lineno,
690
type='context', line=util.fixed_width(line[1:])))
693
elif line.startswith('+'):
694
chunk.diff.append(util.Container(old_lineno=None, new_lineno=new_lineno,
695
type='insert', line=util.fixed_width(line[1:])))
697
elif line.startswith('-'):
698
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=None,
699
type='delete', line=util.fixed_width(line[1:])))
702
chunk.diff.append(util.Container(old_lineno=None, new_lineno=None,
703
type='unknown', line=util.fixed_width(repr(line))))
704
if chunk is not None:
708
def parse_delta(self, delta):
776
710
Return a nested data structure containing the changes in a delta::
785
text_changes: list((filename, file_id)),
787
repo = self._branch.repository
788
if (bzrlib.revision.is_null(old_revid) or
789
bzrlib.revision.is_null(new_revid)):
790
old_tree, new_tree = map(
791
repo.revision_tree, [old_revid, new_revid])
793
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
795
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
797
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
799
return util.Container(
800
added=sorted(reporter.added, key=lambda x: x.filename),
801
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
802
removed=sorted(reporter.removed, key=lambda x: x.filename),
803
modified=sorted(reporter.modified, key=lambda x: x.filename),
804
text_changes=sorted(reporter.text_changes,
805
key=lambda x: x.filename))
725
for path, fid, kind in delta.added:
726
added.append((rich_filename(path, kind), fid))
728
for path, fid, kind, text_modified, meta_modified in delta.modified:
729
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
731
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
732
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
733
if meta_modified or text_modified:
734
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
736
for path, fid, kind in delta.removed:
737
removed.append((rich_filename(path, kind), fid))
739
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
742
def add_side_by_side(changes):
743
# FIXME: this is a rotten API.
744
for change in changes:
745
for m in change.changes.modified:
746
m.sbs_chunks = _make_side_by_side(m.chunks)
748
def get_filelist(self, inv, file_id, sort_type=None):
750
return the list of all files (and their attributes) within a given
754
dir_ie = inv[file_id]
755
path = inv.id2path(file_id)
760
for filename, entry in dir_ie.children.iteritems():
761
revid_set.add(entry.revision)
764
for change in self.get_changes(list(revid_set)):
765
change_dict[change.revid] = change
767
for filename, entry in dir_ie.children.iteritems():
769
if entry.kind == 'directory':
772
revid = entry.revision
774
file = util.Container(
775
filename=filename, executable=entry.executable, kind=entry.kind,
776
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
777
revid=revid, change=change_dict[revid])
778
file_list.append(file)
780
if sort_type == 'filename' or sort_type is None:
781
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
782
elif sort_type == 'size':
783
file_list.sort(key=lambda x: x.size)
784
elif sort_type == 'date':
785
file_list.sort(key=lambda x: x.change.date)
787
# Always sort by kind to get directories first
788
file_list.sort(key=lambda x: x.kind != 'directory')
791
for file in file_list:
798
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
800
def annotate_file(self, file_id, revid):
805
file_revid = self.get_inventory(revid)[file_id].revision
807
tree = self._branch.repository.revision_tree(file_revid)
810
for line_revid, text in tree.annotate_iter(file_id):
811
revid_set.add(line_revid)
812
if self._BADCHARS_RE.match(text):
813
# bail out; this isn't displayable text
814
yield util.Container(parity=0, lineno=1, status='same',
815
text='(This is a binary file.)',
816
change=util.Container())
818
change_cache = dict([(c.revid, c) \
819
for c in self.get_changes(list(revid_set))])
821
last_line_revid = None
822
for line_revid, text in tree.annotate_iter(file_id):
823
if line_revid == last_line_revid:
824
# remember which lines have a new revno and which don't
829
last_line_revid = line_revid
830
change = change_cache[line_revid]
831
trunc_revno = change.revno
832
if len(trunc_revno) > 10:
833
trunc_revno = trunc_revno[:9] + '...'
835
yield util.Container(parity=parity, lineno=lineno, status=status,
836
change=change, text=util.fixed_width(text))
839
self.log.debug('annotate: %r secs' % (time.time() - z,))