38
from bzrlib import tag
38
from StringIO import StringIO
40
from loggerhead import search
41
from loggerhead import util
42
from loggerhead.wholehistory import compute_whole_history_data
39
45
import bzrlib.branch
41
47
import bzrlib.errors
48
import bzrlib.progress
43
49
import bzrlib.revision
45
from loggerhead import search
46
from loggerhead import util
47
from loggerhead.wholehistory import compute_whole_history_data
48
from bzrlib.export.tar_exporter import export_tarball
53
# bzrlib's UIFactory is not thread-safe
54
uihack = threading.local()
56
class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack lives in thread-local storage.

    bzrlib's UIFactory is not thread-safe, so each thread keeps its own
    (dummy) progress bar stack in the `uihack` thread-local object.
    """

    def nested_progress_bar(self):
        stack = getattr(uihack, '_progress_bar_stack', None)
        if stack is None:
            stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
            uihack._progress_bar_stack = stack
        return stack.get_nested()
62
bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Drain buffered delete/insert diff lines into side-by-side rows.

    The shorter of the two buffers is padded with empty 'context'
    placeholders so that deletions and insertions pair up one-to-one;
    each pair is then appended to line_list as a single Container row.
    Both buffers are fully consumed (emptied) in the process.
    """
    padding = (None, '', 'context')
    while len(delete_list) < len(insert_list):
        delete_list.append(padding)
    while len(insert_list) < len(delete_list):
        insert_list.append(padding)
    while delete_list:
        old = delete_list.pop(0)
        new = insert_list.pop(0)
        line_list.append(util.Container(
            old_lineno=old[0], new_lineno=new[0],
            old_line=old[1], new_line=new[1],
            old_type=old[2], new_type=new[2]))
78
def _make_side_by_side(chunk_list):
80
turn a normal unified-style diff (post-processed by parse_delta) into a
81
side-by-side diff structure. the new structure is::
89
type: str('context' or 'changed'),
94
for chunk in chunk_list:
97
delete_list, insert_list = [], []
98
for line in chunk.diff:
99
# Add <wbr/> every X characters so we can wrap properly
100
wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
101
wrap_lines = [util.html_clean(_line) for _line in wrap_line]
102
wrapped_line = wrap_char.join(wrap_lines)
104
if line.type == 'context':
105
if len(delete_list) or len(insert_list):
106
_process_side_by_side_buffers(line_list, delete_list,
108
delete_list, insert_list = [], []
109
line_list.append(util.Container(old_lineno=line.old_lineno,
110
new_lineno=line.new_lineno,
111
old_line=wrapped_line,
112
new_line=wrapped_line,
115
elif line.type == 'delete':
116
delete_list.append((line.old_lineno, wrapped_line, line.type))
117
elif line.type == 'insert':
118
insert_list.append((line.new_lineno, wrapped_line, line.type))
119
if len(delete_list) or len(insert_list):
120
_process_side_by_side_buffers(line_list, delete_list, insert_list)
121
out_chunk_list.append(util.Container(diff=line_list))
122
return out_chunk_list
51
125
def is_branch(folder):
102
178
def __getitem__(self, index):
    """Return the commit date of the index'd revision as a datetime.

    Looks up the revision id at `index` in self.revid_list and converts
    that revision's timestamp (seconds since the epoch) to a local
    datetime, so the sequence can be bisected by date.
    """
    # The corrupted span held two interleaved copies of this body (the
    # second return was unreachable); keep a single implementation.
    return datetime.datetime.fromtimestamp(
        self.repository.get_revision(self.revid_list[index]).timestamp)
107
182
def __len__(self):
    """Return the number of revision ids in the wrapped list."""
    return len(self.revid_list)
110
class FileChangeReporter(object):
112
def __init__(self, old_inv, new_inv):
117
self.text_changes = []
118
self.old_inv = old_inv
119
self.new_inv = new_inv
121
def revid(self, inv, file_id):
123
return inv[file_id].revision
124
except bzrlib.errors.NoSuchId:
127
def report(self, file_id, paths, versioned, renamed, modified,
129
if modified not in ('unchanged', 'kind changed'):
130
if versioned == 'removed':
131
filename = rich_filename(paths[0], kind[0])
133
filename = rich_filename(paths[1], kind[1])
134
self.text_changes.append(util.Container(
135
filename=filename, file_id=file_id,
136
old_revision=self.revid(self.old_inv, file_id),
137
new_revision=self.revid(self.new_inv, file_id)))
138
if versioned == 'added':
139
self.added.append(util.Container(
140
filename=rich_filename(paths[1], kind),
141
file_id=file_id, kind=kind[1]))
142
elif versioned == 'removed':
143
self.removed.append(util.Container(
144
filename=rich_filename(paths[0], kind),
145
file_id=file_id, kind=kind[0]))
147
self.renamed.append(util.Container(
148
old_filename=rich_filename(paths[0], kind[0]),
149
new_filename=rich_filename(paths[1], kind[1]),
151
text_modified=modified == 'modified'))
153
self.modified.append(util.Container(
154
filename=rich_filename(paths[1], kind),
157
# The lru_cache is not thread-safe, so we need a lock around it for
159
rev_info_memory_cache_lock = threading.RLock()
161
class RevInfoMemoryCache(object):
162
"""A store that validates values against the revids they were stored with.
164
We use a unique key for each branch.
166
The reason for not just using the revid as the key is so that when a new
167
value is provided for a branch, we replace the old value used for the
170
There is another implementation of the same interface in
171
loggerhead.changecache.RevInfoDiskCache.
174
def __init__(self, cache):
177
def get(self, key, revid):
178
"""Return the data associated with `key`, subject to a revid check.
180
If a value was stored under `key`, with the same revid, return it.
181
Otherwise return None.
183
rev_info_memory_cache_lock.acquire()
185
cached = self._cache.get(key)
187
rev_info_memory_cache_lock.release()
190
stored_revid, data = cached
191
if revid == stored_revid:
196
def set(self, key, revid, data):
197
"""Store `data` under `key`, to be checked against `revid` on get().
199
rev_info_memory_cache_lock.acquire()
201
self._cache[key] = (revid, data)
203
rev_info_memory_cache_lock.release()
205
# Used to store locks that prevent multiple threads from building a
206
# revision graph for the same branch at the same time, because that can
207
# cause severe performance issues that are so bad that the system seems
209
revision_graph_locks = {}
210
revision_graph_check_lock = threading.Lock()
212
class History(object):
186
class History (object):
213
187
"""Decorate a branch to provide information for rendering.
215
189
History objects are expected to be short lived -- when serving a request
217
191
around it, serve the request, throw the History object away, unlock the
218
192
branch and throw it away.
220
:ivar _file_change_cache: An object that caches information about the
221
files that changed between two revisions.
222
:ivar _rev_info: A list of information about revisions. This is by far
223
the most cryptic data structure in loggerhead. At the top level, it
224
is a list of 3-tuples [(merge-info, where-merged, parents)].
225
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
226
like a merged sorted list, but the revno is stringified.
227
`where-merged` is a tuple of revisions that have this revision as a
228
non-lefthand parent. Finally, `parents` is just the usual list of
229
parents of this revision.
230
:ivar _rev_indices: A dictionary mapping each revision id to the index of
231
the information about it in _rev_info.
232
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
194
:ivar _file_change_cache: xx
236
def _load_whole_history_data(self, caches, cache_key):
237
"""Set the attributes relating to the whole history of the branch.
239
:param caches: a list of caches with interfaces like
240
`RevInfoMemoryCache` and be ordered from fastest to slowest.
241
:param cache_key: the key to use with the caches.
243
self._rev_indices = None
244
self._rev_info = None
247
def update_missed_caches():
248
for cache in missed_caches:
249
cache.set(cache_key, self.last_revid, self._rev_info)
251
# Theoretically, it's possible for two threads to race in creating
252
# the Lock() object for their branch, so we put a lock around
253
# creating the per-branch Lock().
254
revision_graph_check_lock.acquire()
256
if cache_key not in revision_graph_locks:
257
revision_graph_locks[cache_key] = threading.Lock()
259
revision_graph_check_lock.release()
261
revision_graph_locks[cache_key].acquire()
264
data = cache.get(cache_key, self.last_revid)
266
self._rev_info = data
267
update_missed_caches()
270
missed_caches.append(cache)
272
whole_history_data = compute_whole_history_data(self._branch)
273
self._rev_info, self._rev_indices = whole_history_data
274
update_missed_caches()
276
revision_graph_locks[cache_key].release()
278
if self._rev_indices is not None:
279
self._revno_revid = {}
280
for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
281
self._revno_revid[revno_str] = revid
283
self._revno_revid = {}
284
self._rev_indices = {}
285
for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
286
self._rev_indices[revid] = seq
287
self._revno_revid[revno_str] = revid
289
def __init__(self, branch, whole_history_data_cache, file_cache=None,
290
revinfo_disk_cache=None, cache_key=None):
197
def __init__(self, branch, whole_history_data_cache):
291
198
assert branch.is_locked(), (
292
199
"Can only construct a History object with a read-locked branch.")
293
if file_cache is not None:
294
self._file_change_cache = file_cache
295
file_cache.history = self
297
self._file_change_cache = None
200
self._file_change_cache = None
298
201
self._branch = branch
299
self._branch_tags = None
300
202
self._inventory_cache = {}
301
self._branch_nick = self._branch.get_config().get_nickname()
302
self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))
203
self.log = logging.getLogger('loggerhead.%s' % (branch.nick,))
304
205
self.last_revid = branch.last_revision()
306
caches = [RevInfoMemoryCache(whole_history_data_cache)]
307
if revinfo_disk_cache:
308
caches.append(revinfo_disk_cache)
309
self._load_whole_history_data(caches, cache_key)
207
whole_history_data = whole_history_data_cache.get(self.last_revid)
208
if whole_history_data is None:
209
whole_history_data = compute_whole_history_data(branch)
210
whole_history_data_cache[self.last_revid] = whole_history_data
212
(self._revision_graph, self._full_history, self._revision_info,
213
self._revno_revid, self._merge_sort, self._where_merged
214
) = whole_history_data
217
def use_file_cache(self, cache):
218
self._file_change_cache = cache
312
221
def has_revisions(self):
329
237
revid in revid_list.
331
239
if revid_list is None:
332
# Just yield the mainline, starting at start_revid
334
is_null = bzrlib.revision.is_null
335
while not is_null(revid):
337
parents = self._rev_info[self._rev_indices[revid]][2]
240
revid_list = self._full_history
342
241
revid_set = set(revid_list)
343
242
revid = start_revid
345
243
def introduced_revisions(revid):
347
seq = self._rev_indices[revid]
348
md = self._rev_info[seq][0][2]
245
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
350
while i < len(self._rev_info) and self._rev_info[i][0][2] > md:
351
r.add(self._rev_info[i][0][1])
247
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
248
r.add(self._merge_sort[i][1])
355
252
if bzrlib.revision.is_null(revid):
357
rev_introduced = introduced_revisions(revid)
358
matching = rev_introduced.intersection(revid_set)
360
# We don't need to look for these anymore.
361
revid_set.difference_update(matching)
254
if introduced_revisions(revid) & revid_set:
363
parents = self._rev_info[self._rev_indices[revid]][2]
256
parents = self._revision_graph[revid]
364
257
if len(parents) == 0:
366
259
revid = parents[0]
368
261
def get_short_revision_history_by_fileid(self, file_id):
369
262
# FIXME: would be awesome if we could get, for a folder, the list of
370
263
# revisions where items within that folder changed.i
371
possible_keys = [(file_id, revid) for revid in self._rev_indices]
372
get_parent_map = self._branch.repository.texts.get_parent_map
373
# We chunk the requests as this works better with GraphIndex.
374
# See _filter_revisions_touching_file_id in bzrlib/log.py
375
# for more information.
378
for start in xrange(0, len(possible_keys), chunk_size):
379
next_keys = possible_keys[start:start + chunk_size]
380
revids += [k[1] for k in get_parent_map(next_keys)]
381
del possible_keys, next_keys
265
# FIXME: Workaround for bzr versions prior to 1.6b3.
266
# Remove me eventually pretty please :)
267
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
268
w_revids = w.versions()
269
revids = [r for r in self._full_history if r in w_revids]
270
except AttributeError:
271
possible_keys = [(file_id, revid) for revid in self._full_history]
272
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
273
revids = [revid for _, revid in existing_keys.iterkeys()]
384
276
def get_revision_history_since(self, revid_list, date):
385
277
# if a user asks for revisions starting at 01-sep, they mean inclusive,
386
278
# so start at midnight on 02-sep.
387
279
date = date + datetime.timedelta(days=1)
388
# our revid list is sorted in REVERSE date order,
389
# so go thru some hoops here...
280
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
390
281
revid_list.reverse()
391
index = bisect.bisect(_RevListToTimestamps(revid_list,
392
self._branch.repository),
282
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
396
285
revid_list.reverse()
412
300
# all the relevant changes (time-consuming) only to return a list of
413
301
# revids which will be used to fetch a set of changes again.
415
# if they entered a revid, just jump straight there;
416
# ignore the passed-in revid_list
303
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
417
304
revid = self.fix_revid(query)
418
305
if revid is not None:
419
306
if isinstance(revid, unicode):
420
307
revid = revid.encode('utf-8')
421
changes = self.get_changes([revid])
308
changes = self.get_changes([ revid ])
422
309
if (changes is not None) and (len(changes) > 0):
426
313
m = self.us_date_re.match(query)
427
314
if m is not None:
428
date = datetime.datetime(util.fix_year(int(m.group(3))),
315
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
432
317
m = self.earth_date_re.match(query)
433
318
if m is not None:
434
date = datetime.datetime(util.fix_year(int(m.group(3))),
319
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
438
321
m = self.iso_date_re.match(query)
439
322
if m is not None:
440
date = datetime.datetime(util.fix_year(int(m.group(1))),
323
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
443
324
if date is not None:
444
325
if revid_list is None:
445
# if no limit to the query was given,
446
# search only the direct-parent path.
326
# if no limit to the query was given, search only the direct-parent path.
447
327
revid_list = list(self.get_revids_from(None, self.last_revid))
448
328
return self.get_revision_history_since(revid_list, date)
479
356
if revid is None:
480
357
revid = self.last_revid
481
358
if file_id is not None:
483
self.get_short_revision_history_by_fileid(file_id))
484
revlist = self.get_revids_from(revlist, revid)
359
# since revid is 'start_revid', possibly should start the path
360
# tracing from revid... FIXME
361
revlist = list(self.get_short_revision_history_by_fileid(file_id))
362
revlist = list(self.get_revids_from(revlist, revid))
486
revlist = self.get_revids_from(None, revid)
364
revlist = list(self.get_revids_from(None, revid))
490
def _iterate_sufficiently(iterable, stop_at, extra_rev_count):
491
"""Return a list of iterable.
493
If extra_rev_count is None, fully consume iterable.
494
Otherwise, stop at 'stop_at' + extra_rev_count.
497
iterate until you find stop_at, then iterate 10 more times.
499
if extra_rev_count is None:
500
return list(iterable)
509
for count, n in enumerate(iterable):
510
if count >= extra_rev_count:
515
def get_view(self, revid, start_revid, file_id, query=None,
516
extra_rev_count=None):
367
def get_view(self, revid, start_revid, file_id, query=None):
518
369
use the URL parameters (revid, start_revid, file_id, and query) to
519
370
determine the revision list we're viewing (start_revid, file_id, query)
632
475
revnol = revno.split(".")
633
476
revnos = ".".join(revnol[:-2])
634
477
revnolast = int(revnol[-1])
478
if d.has_key(revnos):
637
480
if revnolast < m:
638
d[revnos] = (revnolast, revid)
481
d[revnos] = ( revnolast, revid )
640
d[revnos] = (revnolast, revid)
642
return [revid for (_, revid) in d.itervalues()]
644
def add_branch_nicks(self, change):
483
d[revnos] = ( revnolast, revid )
485
return [ d[revnos][1] for revnos in d.keys() ]
487
def get_branch_nicks(self, changes):
646
given a 'change', fill in the branch nicks on all parents and merge
489
given a list of changes from L{get_changes}, fill in the branch nicks
490
on all parents and merge points.
649
492
fetch_set = set()
650
for p in change.parents:
651
fetch_set.add(p.revid)
652
for p in change.merge_points:
653
fetch_set.add(p.revid)
493
for change in changes:
494
for p in change.parents:
495
fetch_set.add(p.revid)
496
for p in change.merge_points:
497
fetch_set.add(p.revid)
654
498
p_changes = self.get_changes(list(fetch_set))
655
499
p_change_dict = dict([(c.revid, c) for c in p_changes])
656
for p in change.parents:
657
if p.revid in p_change_dict:
658
p.branch_nick = p_change_dict[p.revid].branch_nick
660
p.branch_nick = '(missing)'
661
for p in change.merge_points:
662
if p.revid in p_change_dict:
663
p.branch_nick = p_change_dict[p.revid].branch_nick
665
p.branch_nick = '(missing)'
500
for change in changes:
501
# arch-converted branches may not have merged branch info :(
502
for p in change.parents:
503
if p.revid in p_change_dict:
504
p.branch_nick = p_change_dict[p.revid].branch_nick
506
p.branch_nick = '(missing)'
507
for p in change.merge_points:
508
if p.revid in p_change_dict:
509
p.branch_nick = p_change_dict[p.revid].branch_nick
511
p.branch_nick = '(missing)'
667
513
def get_changes(self, revid_list):
668
514
"""Return a list of changes objects for the given revids.
708
550
return [self._change_from_revision(rev) for rev in rev_list]
552
def _get_deltas_for_revisions_with_trees(self, revisions):
553
"""Produce a list of revision deltas.
555
Note that the input is a sequence of REVISIONS, not revision_ids.
556
Trees will be held in memory until the generator exits.
557
Each delta is relative to the revision's lefthand predecessor.
558
(This is copied from bzrlib.)
560
required_trees = set()
561
for revision in revisions:
562
required_trees.add(revision.revid)
563
required_trees.update([p.revid for p in revision.parents[:1]])
564
trees = dict((t.get_revision_id(), t) for
565
t in self._branch.repository.revision_trees(required_trees))
567
for revision in revisions:
568
if not revision.parents:
569
old_tree = self._branch.repository.revision_tree(
570
bzrlib.revision.NULL_REVISION)
572
old_tree = trees[revision.parents[0].revid]
573
tree = trees[revision.revid]
574
ret.append(tree.changes_from(old_tree))
710
577
def _change_from_revision(self, revision):
712
579
Given a bzrlib Revision, return a processed "change" for use in
582
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
584
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
715
586
message, short_message = clean_message(revision.message)
717
if self._branch_tags is None:
718
self._branch_tags = self._branch.tags.get_reverse_tag_dict()
721
if revision.revision_id in self._branch_tags:
722
# tag.sort_* functions expect (tag, data) pairs, so we generate them,
723
# and then strip them
724
tags = [(t, None) for t in self._branch_tags[revision.revision_id]]
725
sort_func = getattr(tag, 'sort_natural', None)
726
if sort_func is None:
729
sort_func(self._branch, tags)
730
revtags = u', '.join([t[0] for t in tags])
733
589
'revid': revision.revision_id,
734
'date': datetime.datetime.fromtimestamp(revision.timestamp),
735
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
736
'committer': revision.committer,
737
'authors': revision.get_apparent_authors(),
591
'author': revision.get_apparent_author(),
738
592
'branch_nick': revision.properties.get('branch-nick', None),
739
593
'short_comment': short_message,
740
594
'comment': revision.message,
741
595
'comment_clean': [util.html_clean(s) for s in message],
742
596
'parents': revision.parent_ids,
743
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
746
if isinstance(revision, bzrlib.foreign.ForeignRevision):
747
foreign_revid, mapping = (
748
revision.foreign_revid, revision.mapping)
749
elif ":" in revision.revision_id:
751
foreign_revid, mapping = \
752
bzrlib.foreign.foreign_vcs_registry.parse_revision_id(
753
revision.revision_id)
754
except bzrlib.errors.InvalidRevisionId:
759
if foreign_revid is not None:
760
entry["foreign_vcs"] = mapping.vcs.abbreviation
761
entry["foreign_revid"] = mapping.vcs.show_foreign_revid(foreign_revid)
762
598
return util.Container(entry)
764
def get_file_changes_uncached(self, entry):
766
old_revid = entry.parents[0].revid
768
old_revid = bzrlib.revision.NULL_REVISION
769
return self.file_changes_for_revision_ids(old_revid, entry.revid)
771
def get_file_changes(self, entry):
    """Return the file changes for `entry`.

    Uses the configured file-change cache when one is present, falling
    back to computing the changes directly otherwise.
    """
    # The corrupted span interleaved an old list-based variant
    # (`entries`) with the new single-entry variant; keep the
    # single-entry form, matching get_file_changes_uncached(self, entry).
    if self._file_change_cache is None:
        return self.get_file_changes_uncached(entry)
    else:
        return self._file_change_cache.get_file_changes(entry)
777
def add_changes(self, entry):
    """Fetch the file changes for `entry` and attach them as entry.changes."""
    entry.changes = self.get_file_changes(entry)
609
return self._file_change_cache.get_file_changes(entries)
611
def add_changes(self, entries):
612
changes_list = self.get_file_changes(entries)
614
for entry, changes in zip(entries, changes_list):
615
entry.changes = changes
617
def get_change_with_diff(self, revid, compare_revid=None):
618
change = self.get_changes([revid])[0]
620
if compare_revid is None:
622
compare_revid = change.parents[0].revid
624
compare_revid = 'null:'
626
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
627
rev_tree2 = self._branch.repository.revision_tree(revid)
628
delta = rev_tree2.changes_from(rev_tree1)
630
change.changes = self.parse_delta(delta)
631
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
781
635
def get_file(self, file_id, revid):
782
"""Returns (path, filename, file contents)"""
636
"returns (path, filename, data)"
783
637
inv = self.get_inventory(revid)
784
638
inv_entry = inv[file_id]
785
639
rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
788
642
path = '/' + path
789
643
return path, inv_entry.name, rev_tree.get_file_text(file_id)
791
def file_changes_for_revision_ids(self, old_revid, new_revid):
645
def _parse_diffs(self, old_tree, new_tree, delta):
647
Return a list of processed diffs, in the format::
656
type: str('context', 'delete', or 'insert'),
665
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
667
process.append((old_path, new_path, fid, kind))
668
for path, fid, kind, text_modified, meta_modified in delta.modified:
669
process.append((path, path, fid, kind))
671
for old_path, new_path, fid, kind in process:
672
old_lines = old_tree.get_file_lines(fid)
673
new_lines = new_tree.get_file_lines(fid)
675
if old_lines != new_lines:
677
bzrlib.diff.internal_diff(old_path, old_lines,
678
new_path, new_lines, buffer)
679
except bzrlib.errors.BinaryFile:
682
diff = buffer.getvalue()
685
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
689
def _process_diff(self, diff):
690
# doesn't really need to be a method; could be static.
693
for line in diff.splitlines():
696
if line.startswith('+++ ') or line.startswith('--- '):
698
if line.startswith('@@ '):
700
if chunk is not None:
702
chunk = util.Container()
704
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
705
old_lineno = lines[0]
706
new_lineno = lines[1]
707
elif line.startswith(' '):
708
chunk.diff.append(util.Container(old_lineno=old_lineno,
709
new_lineno=new_lineno,
714
elif line.startswith('+'):
715
chunk.diff.append(util.Container(old_lineno=None,
716
new_lineno=new_lineno,
717
type='insert', line=line[1:]))
719
elif line.startswith('-'):
720
chunk.diff.append(util.Container(old_lineno=old_lineno,
722
type='delete', line=line[1:]))
725
chunk.diff.append(util.Container(old_lineno=None,
729
if chunk is not None:
733
def parse_delta(self, delta):
793
735
Return a nested data structure containing the changes in a delta::
802
text_changes: list((filename, file_id)),
804
repo = self._branch.repository
805
if (bzrlib.revision.is_null(old_revid) or
806
bzrlib.revision.is_null(new_revid)):
807
old_tree, new_tree = map(
808
repo.revision_tree, [old_revid, new_revid])
810
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
812
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
814
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
816
return util.Container(
817
added=sorted(reporter.added, key=lambda x: x.filename),
818
renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
819
removed=sorted(reporter.removed, key=lambda x: x.filename),
820
modified=sorted(reporter.modified, key=lambda x: x.filename),
821
text_changes=sorted(reporter.text_changes,
822
key=lambda x: x.filename))
750
for path, fid, kind in delta.added:
751
added.append((rich_filename(path, kind), fid))
753
for path, fid, kind, text_modified, meta_modified in delta.modified:
754
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
756
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
757
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
758
if meta_modified or text_modified:
759
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
761
for path, fid, kind in delta.removed:
762
removed.append((rich_filename(path, kind), fid))
764
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
767
def add_side_by_side(changes):
    """Attach side-by-side diff chunk lists to every modified file container."""
    # FIXME: this is a rotten API.
    for change in changes:
        for modified_file in change.changes.modified:
            modified_file.sbs_chunks = _make_side_by_side(modified_file.chunks)
773
def get_filelist(self, inv, file_id, sort_type=None):
775
return the list of all files (and their attributes) within a given
779
dir_ie = inv[file_id]
780
path = inv.id2path(file_id)
785
for filename, entry in dir_ie.children.iteritems():
786
revid_set.add(entry.revision)
789
for change in self.get_changes(list(revid_set)):
790
change_dict[change.revid] = change
792
for filename, entry in dir_ie.children.iteritems():
794
if entry.kind == 'directory':
797
revid = entry.revision
799
file = util.Container(
800
filename=filename, executable=entry.executable, kind=entry.kind,
801
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
802
revid=revid, change=change_dict[revid])
803
file_list.append(file)
805
if sort_type == 'filename' or sort_type is None:
806
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
807
elif sort_type == 'size':
808
file_list.sort(key=lambda x: x.size)
809
elif sort_type == 'date':
810
file_list.sort(key=lambda x: x.change.date)
812
# Always sort by kind to get directories first
813
file_list.sort(key=lambda x: x.kind != 'directory')
816
for file in file_list:
823
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
825
def annotate_file(self, file_id, revid):
830
file_revid = self.get_inventory(revid)[file_id].revision
832
tree = self._branch.repository.revision_tree(file_revid)
835
for line_revid, text in tree.annotate_iter(file_id):
836
revid_set.add(line_revid)
837
if self._BADCHARS_RE.match(text):
838
# bail out; this isn't displayable text
839
yield util.Container(parity=0, lineno=1, status='same',
840
text='(This is a binary file.)',
841
change=util.Container())
843
change_cache = dict([(c.revid, c) \
844
for c in self.get_changes(list(revid_set))])
846
last_line_revid = None
847
for line_revid, text in tree.annotate_iter(file_id):
848
if line_revid == last_line_revid:
849
# remember which lines have a new revno and which don't
854
last_line_revid = line_revid
855
change = change_cache[line_revid]
856
trunc_revno = change.revno
857
if len(trunc_revno) > 10:
858
trunc_revno = trunc_revno[:9] + '...'
860
yield util.Container(parity=parity, lineno=lineno, status=status,
861
change=change, text=util.fixed_width(text))
864
self.log.debug('annotate: %r secs' % (time.time() - z,))