45
46
import bzrlib.branch
47
49
import bzrlib.errors
48
50
import bzrlib.progress
49
51
import bzrlib.revision
52
import bzrlib.textfile
50
53
import bzrlib.tsort
53
56
# bzrlib's UIFactory is not thread-safe
uihack = threading.local()


class ThreadSafeUIFactory(bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory whose progress-bar stack is thread-local.

    bzrlib's stock UIFactory keeps a single shared progress-bar stack,
    which breaks when several threads use the library concurrently.
    Keeping the stack in ``uihack`` (a ``threading.local``) gives each
    thread its own stack of silent (Dummy) progress bars.
    """

    def nested_progress_bar(self):
        # Lazily create one stack per thread; DummyProgress keeps the
        # factory silent, matching SilentUIFactory's contract.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            pbs = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
            uihack._progress_bar_stack = pbs
        return uihack._progress_bar_stack.get_nested()


bzrlib.ui.ui_factory = ThreadSafeUIFactory()
65
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Drain paired delete/insert buffers into side-by-side rows.

    Pads the shorter buffer with blank 'context' entries so the two
    columns have the same number of rows, then appends one
    util.Container per row to line_list.  Both buffers are emptied as
    a side effect, matching the original pop-based implementation.
    """
    filler = (None, '', 'context')
    while len(delete_list) < len(insert_list):
        delete_list.append(filler)
    while len(insert_list) < len(delete_list):
        insert_list.append(filler)
    # Iterate with zip() instead of repeated pop(0): list.pop(0) is
    # O(n) per call, which made the original drain loop quadratic.
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Preserve the original behavior of leaving both buffers empty.
    del delete_list[:]
    del insert_list[:]
78
def _make_side_by_side(chunk_list):
80
turn a normal unified-style diff (post-processed by parse_delta) into a
81
side-by-side diff structure. the new structure is::
89
type: str('context' or 'changed'),
94
for chunk in chunk_list:
97
delete_list, insert_list = [], []
98
for line in chunk.diff:
99
# Add <wbr/> every X characters so we can wrap properly
100
wrap_line = re.findall(r'.{%d}|.+$' % 78, line.line)
101
wrap_lines = [util.html_clean(_line) for _line in wrap_line]
102
wrapped_line = wrap_char.join(wrap_lines)
104
if line.type == 'context':
105
if len(delete_list) or len(insert_list):
106
_process_side_by_side_buffers(line_list, delete_list,
108
delete_list, insert_list = [], []
109
line_list.append(util.Container(old_lineno=line.old_lineno,
110
new_lineno=line.new_lineno,
111
old_line=wrapped_line,
112
new_line=wrapped_line,
115
elif line.type == 'delete':
116
delete_list.append((line.old_lineno, wrapped_line, line.type))
117
elif line.type == 'insert':
118
insert_list.append((line.new_lineno, wrapped_line, line.type))
119
if len(delete_list) or len(insert_list):
120
_process_side_by_side_buffers(line_list, delete_list, insert_list)
121
out_chunk_list.append(util.Container(diff=line_list))
122
return out_chunk_list
125
71
def is_branch(folder):
127
73
bzrlib.branch.Branch.open(folder)
178
125
def __getitem__(self, index):
    """Return the commit date of the index'd revision as a datetime.

    Looks up the revision id at *index* in self.revid_list and converts
    the revision's timestamp (seconds since the epoch) to a
    datetime.datetime.  The original block carried two copies of this
    return statement (pre- and post-wrap diff residue); only one is kept.
    """
    return datetime.datetime.fromtimestamp(self.repository.get_revision(
        self.revid_list[index]).timestamp)
182
130
def __len__(self):
    """Return the number of revision ids this wrapper spans."""
    return len(self.revid_list)
133
class FileChangeReporter(object):
134
def __init__(self, old_inv, new_inv):
139
self.text_changes = []
140
self.old_inv = old_inv
141
self.new_inv = new_inv
143
def revid(self, inv, file_id):
145
return inv[file_id].revision
146
except bzrlib.errors.NoSuchId:
149
def report(self, file_id, paths, versioned, renamed, modified,
151
if modified not in ('unchanged', 'kind changed'):
152
if versioned == 'removed':
153
filename = rich_filename(paths[0], kind[0])
155
filename = rich_filename(paths[1], kind[1])
156
self.text_changes.append(util.Container(
157
filename=filename, file_id=file_id,
158
old_revision=self.revid(self.old_inv, file_id),
159
new_revision=self.revid(self.new_inv, file_id)))
160
if versioned == 'added':
161
self.added.append(util.Container(
162
filename=rich_filename(paths[1], kind),
163
file_id=file_id, kind=kind[1]))
164
elif versioned == 'removed':
165
self.removed.append(util.Container(
166
filename=rich_filename(paths[0], kind),
167
file_id=file_id, kind=kind[0]))
169
self.renamed.append(util.Container(
170
old_filename=rich_filename(paths[0], kind[0]),
171
new_filename=rich_filename(paths[1], kind[1]),
173
text_modified=modified == 'modified'))
175
self.modified.append(util.Container(
176
filename=rich_filename(paths[1], kind),
186
180
class History (object):
187
181
"""Decorate a branch to provide information for rendering.
262
258
# FIXME: would be awesome if we could get, for a folder, the list of
263
259
# revisions where items within that folder changed.
265
# FIXME: Workaround for bzr versions prior to 1.6b3.
261
# FIXME: Workaround for bzr versions prior to 1.6b3.
266
262
# Remove me eventually pretty please :)
267
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
268
w_revids = w.versions()
269
revids = [r for r in self._full_history if r in w_revids]
263
w = self._branch.repository.weave_store.get_weave(
264
file_id, self._branch.repository.get_transaction())
265
w_revids = w.versions()
266
revids = [r for r in self._full_history if r in w_revids]
270
267
except AttributeError:
271
268
possible_keys = [(file_id, revid) for revid in self._full_history]
272
existing_keys = self._branch.repository.texts.get_parent_map(possible_keys)
273
revids = [revid for _, revid in existing_keys.iterkeys()]
269
get_parent_map = self._branch.repository.texts.get_parent_map
270
# We chunk the requests as this works better with GraphIndex.
271
# See _filter_revisions_touching_file_id in bzrlib/log.py
272
# for more information.
275
for start in xrange(0, len(possible_keys), chunk_size):
276
next_keys = possible_keys[start:start + chunk_size]
277
revids += [k[1] for k in get_parent_map(next_keys)]
278
del possible_keys, next_keys
276
281
def get_revision_history_since(self, revid_list, date):
277
282
# if a user asks for revisions starting at 01-sep, they mean inclusive,
278
283
# so start at midnight on 02-sep.
279
284
date = date + datetime.timedelta(days=1)
280
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
285
# our revid list is sorted in REVERSE date order,
286
# so go thru some hoops here...
281
287
revid_list.reverse()
282
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
288
index = bisect.bisect(_RevListToTimestamps(revid_list,
289
self._branch.repository),
285
293
revid_list.reverse()
300
309
# all the relevant changes (time-consuming) only to return a list of
301
310
# revids which will be used to fetch a set of changes again.
303
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
312
# if they entered a revid, just jump straight there;
313
# ignore the passed-in revid_list
304
314
revid = self.fix_revid(query)
305
315
if revid is not None:
306
316
if isinstance(revid, unicode):
307
317
revid = revid.encode('utf-8')
308
changes = self.get_changes([ revid ])
318
changes = self.get_changes([revid])
309
319
if (changes is not None) and (len(changes) > 0):
313
323
m = self.us_date_re.match(query)
314
324
if m is not None:
315
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
325
date = datetime.datetime(util.fix_year(int(m.group(3))),
317
329
m = self.earth_date_re.match(query)
318
330
if m is not None:
319
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
331
date = datetime.datetime(util.fix_year(int(m.group(3))),
321
335
m = self.iso_date_re.match(query)
322
336
if m is not None:
323
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
337
date = datetime.datetime(util.fix_year(int(m.group(1))),
324
340
if date is not None:
325
341
if revid_list is None:
326
# if no limit to the query was given, search only the direct-parent path.
342
# if no limit to the query was given,
343
# search only the direct-parent path.
327
344
revid_list = list(self.get_revids_from(None, self.last_revid))
328
345
return self.get_revision_history_since(revid_list, date)
478
495
revnol = revno.split(".")
479
496
revnos = ".".join(revnol[:-2])
480
497
revnolast = int(revnol[-1])
481
if d.has_key(revnos):
498
if revnos in d.keys():
483
500
if revnolast < m:
484
d[revnos] = ( revnolast, revid )
501
d[revnos] = (revnolast, revid)
486
d[revnos] = ( revnolast, revid )
488
return [ d[revnos][1] for revnos in d.keys() ]
490
def get_branch_nicks(self, changes):
503
d[revnos] = (revnolast, revid)
505
return [d[revnos][1] for revnos in d.keys()]
507
def add_branch_nicks(self, change):
492
given a list of changes from L{get_changes}, fill in the branch nicks
493
on all parents and merge points.
509
given a 'change', fill in the branch nicks on all parents and merge
495
512
fetch_set = set()
496
for change in changes:
497
for p in change.parents:
498
fetch_set.add(p.revid)
499
for p in change.merge_points:
500
fetch_set.add(p.revid)
513
for p in change.parents:
514
fetch_set.add(p.revid)
515
for p in change.merge_points:
516
fetch_set.add(p.revid)
501
517
p_changes = self.get_changes(list(fetch_set))
502
518
p_change_dict = dict([(c.revid, c) for c in p_changes])
503
for change in changes:
504
# arch-converted branches may not have merged branch info :(
505
for p in change.parents:
506
if p.revid in p_change_dict:
507
p.branch_nick = p_change_dict[p.revid].branch_nick
509
p.branch_nick = '(missing)'
510
for p in change.merge_points:
511
if p.revid in p_change_dict:
512
p.branch_nick = p_change_dict[p.revid].branch_nick
514
p.branch_nick = '(missing)'
519
for p in change.parents:
520
if p.revid in p_change_dict:
521
p.branch_nick = p_change_dict[p.revid].branch_nick
523
p.branch_nick = '(missing)'
524
for p in change.merge_points:
525
if p.revid in p_change_dict:
526
p.branch_nick = p_change_dict[p.revid].branch_nick
528
p.branch_nick = '(missing)'
516
530
def get_changes(self, revid_list):
517
531
"""Return a list of changes objects for the given revids.
553
571
return [self._change_from_revision(rev) for rev in rev_list]
555
def _get_deltas_for_revisions_with_trees(self, revisions):
556
"""Produce a list of revision deltas.
558
Note that the input is a sequence of REVISIONS, not revision_ids.
559
Trees will be held in memory until the generator exits.
560
Each delta is relative to the revision's lefthand predecessor.
561
(This is copied from bzrlib.)
563
required_trees = set()
564
for revision in revisions:
565
required_trees.add(revision.revid)
566
required_trees.update([p.revid for p in revision.parents[:1]])
567
trees = dict((t.get_revision_id(), t) for
568
t in self._branch.repository.revision_trees(required_trees))
570
for revision in revisions:
571
if not revision.parents:
572
old_tree = self._branch.repository.revision_tree(
573
bzrlib.revision.NULL_REVISION)
575
old_tree = trees[revision.parents[0].revid]
576
tree = trees[revision.revid]
577
ret.append(tree.changes_from(old_tree))
580
573
def _change_from_revision(self, revision):
582
575
Given a bzrlib Revision, return a processed "change" for use in
601
600
return util.Container(entry)
603
def get_file_changes_uncached(self, entries):
604
delta_list = self._get_deltas_for_revisions_with_trees(entries)
606
return [self.parse_delta(delta) for delta in delta_list]
608
def get_file_changes(self, entries):
602
def get_file_changes_uncached(self, entry):
603
repo = self._branch.repository
605
old_revid = entry.parents[0].revid
607
old_revid = bzrlib.revision.NULL_REVISION
608
return self.file_changes_for_revision_ids(old_revid, entry.revid)
610
def get_file_changes(self, entry):
609
611
if self._file_change_cache is None:
610
return self.get_file_changes_uncached(entries)
612
return self.get_file_changes_uncached(entry)
612
return self._file_change_cache.get_file_changes(entries)
614
def add_changes(self, entries):
615
changes_list = self.get_file_changes(entries)
617
for entry, changes in zip(entries, changes_list):
618
entry.changes = changes
620
def get_change_with_diff(self, revid, compare_revid=None):
621
change = self.get_changes([revid])[0]
623
if compare_revid is None:
625
compare_revid = change.parents[0].revid
627
compare_revid = 'null:'
629
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
630
rev_tree2 = self._branch.repository.revision_tree(revid)
631
delta = rev_tree2.changes_from(rev_tree1)
633
change.changes = self.parse_delta(delta)
634
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
614
return self._file_change_cache.get_file_changes(entry)
616
def add_changes(self, entry):
    """Compute the file-change info for *entry* and attach it to it."""
    entry.changes = self.get_file_changes(entry)
638
620
def get_file(self, file_id, revid):
639
621
"returns (path, filename, data)"
645
627
path = '/' + path
646
628
return path, inv_entry.name, rev_tree.get_file_text(file_id)
648
def _parse_diffs(self, old_tree, new_tree, delta):
650
Return a list of processed diffs, in the format::
659
type: str('context', 'delete', or 'insert'),
668
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
670
process.append((old_path, new_path, fid, kind))
671
for path, fid, kind, text_modified, meta_modified in delta.modified:
672
process.append((path, path, fid, kind))
674
for old_path, new_path, fid, kind in process:
675
old_lines = old_tree.get_file_lines(fid)
676
new_lines = new_tree.get_file_lines(fid)
678
if old_lines != new_lines:
680
bzrlib.diff.internal_diff(old_path, old_lines,
681
new_path, new_lines, buffer)
682
except bzrlib.errors.BinaryFile:
685
diff = buffer.getvalue()
688
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
692
def _process_diff(self, diff):
693
# doesn't really need to be a method; could be static.
696
for line in diff.splitlines():
699
if line.startswith('+++ ') or line.startswith('--- '):
701
if line.startswith('@@ '):
703
if chunk is not None:
705
chunk = util.Container()
707
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
708
old_lineno = lines[0]
709
new_lineno = lines[1]
710
elif line.startswith(' '):
711
chunk.diff.append(util.Container(old_lineno=old_lineno,
712
new_lineno=new_lineno,
717
elif line.startswith('+'):
718
chunk.diff.append(util.Container(old_lineno=None,
719
new_lineno=new_lineno,
720
type='insert', line=line[1:]))
722
elif line.startswith('-'):
723
chunk.diff.append(util.Container(old_lineno=old_lineno,
725
type='delete', line=line[1:]))
728
chunk.diff.append(util.Container(old_lineno=None,
732
if chunk is not None:
736
def parse_delta(self, delta):
630
def file_changes_for_revision_ids(self, old_revid, new_revid):
738
632
Return a nested data structure containing the changes in a delta::
753
for path, fid, kind in delta.added:
754
added.append((rich_filename(path, kind), fid))
756
for path, fid, kind, text_modified, meta_modified in delta.modified:
757
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
759
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
760
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
761
if meta_modified or text_modified:
762
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
764
for path, fid, kind in delta.removed:
765
removed.append((rich_filename(path, kind), fid))
767
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
770
def add_side_by_side(changes):
771
# FIXME: this is a rotten API.
772
for change in changes:
773
for m in change.changes.modified:
774
m.sbs_chunks = _make_side_by_side(m.chunks)
776
def get_filelist(self, inv, file_id, sort_type=None):
778
return the list of all files (and their attributes) within a given
782
dir_ie = inv[file_id]
783
path = inv.id2path(file_id)
788
for filename, entry in dir_ie.children.iteritems():
789
revid_set.add(entry.revision)
792
for change in self.get_changes(list(revid_set)):
793
change_dict[change.revid] = change
795
for filename, entry in dir_ie.children.iteritems():
797
if entry.kind == 'directory':
800
revid = entry.revision
802
file = util.Container(
803
filename=filename, executable=entry.executable, kind=entry.kind,
804
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
805
revid=revid, change=change_dict[revid])
806
file_list.append(file)
808
if sort_type == 'filename' or sort_type is None:
809
file_list.sort(key=lambda x: x.filename.lower()) # case-insensitive
810
elif sort_type == 'size':
811
file_list.sort(key=lambda x: x.size)
812
elif sort_type == 'date':
813
file_list.sort(key=lambda x: x.change.date)
815
# Always sort by kind to get directories first
816
file_list.sort(key=lambda x: x.kind != 'directory')
819
for file in file_list:
826
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
828
def annotate_file(self, file_id, revid):
833
file_revid = self.get_inventory(revid)[file_id].revision
835
tree = self._branch.repository.revision_tree(file_revid)
838
for line_revid, text in tree.annotate_iter(file_id):
839
revid_set.add(line_revid)
840
if self._BADCHARS_RE.match(text):
841
# bail out; this isn't displayable text
842
yield util.Container(parity=0, lineno=1, status='same',
843
text='(This is a binary file.)',
844
change=util.Container())
846
change_cache = dict([(c.revid, c) \
847
for c in self.get_changes(list(revid_set))])
849
last_line_revid = None
850
for line_revid, text in tree.annotate_iter(file_id):
851
if line_revid == last_line_revid:
852
# remember which lines have a new revno and which don't
857
last_line_revid = line_revid
858
change = change_cache[line_revid]
859
trunc_revno = change.revno
860
if len(trunc_revno) > 10:
861
trunc_revno = trunc_revno[:9] + '...'
863
yield util.Container(parity=parity, lineno=lineno, status=status,
864
change=change, text=util.fixed_width(text))
867
self.log.debug('annotate: %r secs' % (time.time() - z,))
641
text_changes: list((filename, file_id)),
643
repo = self._branch.repository
644
if bzrlib.revision.is_null(old_revid) or \
645
bzrlib.revision.is_null(new_revid):
646
old_tree, new_tree = map(
647
repo.revision_tree, [old_revid, new_revid])
649
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
651
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
653
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
655
return util.Container(
656
added=sorted(reporter.added, key=lambda x:x.filename),
657
renamed=sorted(reporter.renamed, key=lambda x:x.new_filename),
658
removed=sorted(reporter.removed, key=lambda x:x.filename),
659
modified=sorted(reporter.modified, key=lambda x:x.filename),
660
text_changes=sorted(reporter.text_changes, key=lambda x:x.filename))