import bisect
import datetime
import logging
import re
import threading
import time

from StringIO import StringIO

from loggerhead import search
from loggerhead import util
from loggerhead.util import decorator
from loggerhead.wholehistory import compute_whole_history_data

import bzrlib.branch
import bzrlib.bundle.serializer
import bzrlib.delta
import bzrlib.diff
import bzrlib.errors
import bzrlib.progress
import bzrlib.revision
import bzrlib.ui


with_branch_lock = util.with_lock('_lock', 'branch')


def with_bzrlib_read_lock(unbound):
    def bzrlib_read_locked(self, *args, **kw):
        #self.log.debug('-> %r bzr lock', id(threading.currentThread()))
        self._branch.repository.lock_read()
        try:
            return unbound(self, *args, **kw)
        finally:
            self._branch.repository.unlock()
            #self.log.debug('<- %r bzr lock', id(threading.currentThread()))
    return bzrlib_read_locked


# bzrlib's UIFactory is not thread-safe
uihack = threading.local()


class ThreadSafeUIFactory(bzrlib.ui.SilentUIFactory):

    def nested_progress_bar(self):
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()

bzrlib.ui.ui_factory = ThreadSafeUIFactory()


def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    while len(delete_list) > 0:
        d = delete_list.pop(0)
        i = insert_list.pop(0)
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))


def _make_side_by_side(chunk_list):
    """
    turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  the new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list, insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(old_lineno=line.old_lineno, new_lineno=line.new_lineno,
                                                old_line=line.line, new_line=line.line,
                                                old_type=line.type, new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
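

# A minimal sketch (not part of the original module) of what _make_side_by_side
# produces; the literal values below are hypothetical test data.  A paired
# delete/insert becomes a single row carrying both the old and new line.
def _example_make_side_by_side():
    chunk = util.Container(diff=[
        util.Container(old_lineno=1, new_lineno=1, type='context', line='a'),
        util.Container(old_lineno=2, new_lineno=None, type='delete', line='b'),
        util.Container(old_lineno=None, new_lineno=2, type='insert', line='B'),
    ])
    sbs = _make_side_by_side([chunk])
    # sbs[0].diff[1] is a Container with old_lineno=2, new_lineno=2,
    # old_line='b', new_line='B', old_type='delete', new_type='insert'.
    return sbs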


def is_branch(folder):
    # A folder counts as a branch if bzrlib can open it as one.
    try:
        bzrlib.branch.Branch.open(folder)
        return True
    except:
        return False


class _RevListToTimestamps(object):
    """Wrap a list of revisions so it can be bisected by date."""

    def __init__(self, revid_list, repository):
        self.revid_list = revid_list
        self.repository = repository

    def __getitem__(self, index):
        """Get the date of the index'd item"""
        return datetime.datetime.fromtimestamp(self.repository.get_revision(
            self.revid_list[index]).timestamp)

    def __len__(self):
        return len(self.revid_list)


class FileChangeReporter(object):

    def __init__(self, old_inv, new_inv):
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        if modified not in ('unchanged', 'kind changed'):
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id, kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind),
                file_id=file_id, kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                file_id=file_id,
                text_modified=modified == 'modified'))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id))


class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        cached = self._cache.get(key)
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get().
        """
        self._cache[key] = (revid, data)
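

# A minimal usage sketch (not part of the original module); the key and revid
# strings are hypothetical.  The stored value is only returned while the
# branch tip it was recorded against is unchanged.
def _example_revinfo_memory_cache():
    cache = RevInfoMemoryCache({})
    cache.set('branch-key', 'tip-revid-1', ['whole', 'history', 'data'])
    assert cache.get('branch-key', 'tip-revid-1') == ['whole', 'history', 'data']
    # A different tip revid means the cached value is stale, so get() misses.
    assert cache.get('branch-key', 'tip-revid-2') is None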


class History(object):
    """Decorate a branch to provide information for rendering.

    History objects are expected to be short lived -- when serving a request
    for a particular branch, open it, read-lock it, wrap a History object
    around it, serve the request, throw the History object away, unlock the
    branch and throw it away.

    :ivar _file_change_cache: An object that caches information about the
        files that changed between two revisions.
    :ivar _rev_info: A list of information about revisions.  This is by far
        the most cryptic data structure in loggerhead.  At the top level, it
        is a list of 3-tuples [(merge-info, where-merged, parents)].
        `merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
        like a merged sorted list, but the revno is stringified.
        `where-merged` is a tuple of revisions that have this revision as a
        non-lefthand parent.  Finally, `parents` is just the usual list of
        parents of this revision.
    :ivar _rev_indices: A dictionary mapping each revision id to the index of
        the information about it in _rev_info.
    :ivar _revno_revid: A dictionary mapping stringified revnos to revision
        ids.
    """
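
    # Illustrative sketch (not part of the original module; the revids and
    # the end_of_merge flag are hypothetical): for a side-branch revision
    # with revno "1.1.1" that was merged into 'rev-id-2', its _rev_info
    # entry has the shape
    #
    #     ((seq, 'rev-id-1.1.1', 1, '1.1.1', end_of_merge),
    #      ('rev-id-2',),
    #      ['rev-id-1'])
    #
    # i.e. its merge-sorted position, merge depth, stringified revno, the
    # revisions that merged it, and its parents.  _rev_indices then maps
    # 'rev-id-1.1.1' to seq, and _revno_revid maps '1.1.1' back to the revid.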

    def _load_whole_history_data(self, caches, cache_key):
        """Set the attributes relating to the whole history of the branch.

        :param caches: a list of caches with interfaces like
            `RevInfoMemoryCache`, ordered from fastest to slowest.
        :param cache_key: the key to use with the caches.
        """
        self._rev_indices = None
        self._rev_info = None

        missed_caches = []
        def update_missed_caches():
            for cache in missed_caches:
                cache.set(cache_key, self.last_revid, self._rev_info)
        for cache in caches:
            data = cache.get(cache_key, self.last_revid)
            if data is not None:
                self._rev_info = data
                update_missed_caches()
                break
            else:
                missed_caches.append(cache)
        else:
            whole_history_data = compute_whole_history_data(self._branch)
            self._rev_info, self._rev_indices = whole_history_data
            update_missed_caches()

        if self._rev_indices is not None:
            self._revno_revid = {}
            for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
                self._revno_revid[revno_str] = revid
        else:
            self._revno_revid = {}
            self._rev_indices = {}
            for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
                self._rev_indices[revid] = seq
                self._revno_revid[revno_str] = revid
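
    # Illustrative note (not part of the original module): with, say, a
    # RevInfoMemoryCache in front of a RevInfoDiskCache, a miss in the memory
    # cache followed by a hit on disk back-fills the memory cache through
    # update_missed_caches(); a miss in both falls through to
    # compute_whole_history_data() and back-fills every cache that missed.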

    def __init__(self, branch, whole_history_data_cache, file_cache=None,
                 revinfo_disk_cache=None, cache_key=None):
        assert branch.is_locked(), (
            "Can only construct a History object with a read-locked branch.")
        if file_cache is not None:
            self._file_change_cache = file_cache
            file_cache.history = self
        else:
            self._file_change_cache = None
        self._branch = branch
        self._inventory_cache = {}
        self._branch_nick = self._branch.get_config().get_nickname()
        self.log = logging.getLogger('loggerhead.%s' % (self._branch_nick,))

        self.last_revid = branch.last_revision()

        caches = [RevInfoMemoryCache(whole_history_data_cache)]
        if revinfo_disk_cache:
            caches.append(revinfo_disk_cache)
        self._load_whole_history_data(caches, cache_key)

    @property
    def has_revisions(self):
        return not bzrlib.revision.is_null(self.last_revid)

    def get_config(self):
        return self._branch.get_config()

    def get_revno(self, revid):
        if revid not in self._rev_indices:
            # ghost parent?
            return 'unknown'
        seq = self._rev_indices[revid]
        revno = self._rev_info[seq][0][3]
        return revno

    def get_revids_from(self, revid_list, start_revid):
        if introduced_revisions(revid) & revid_set:
            yield revid
        parents = self._rev_info[self._rev_indices[revid]][2]
        if len(parents) == 0:
            return
        revid = parents[0]

    def get_short_revision_history_by_fileid(self, file_id):
        # FIXME: would be awesome if we could get, for a folder, the list of
        # revisions where items within that folder changed.
        possible_keys = [(file_id, revid) for revid in self._rev_indices]
        get_parent_map = self._branch.repository.texts.get_parent_map
        # We chunk the requests as this works better with GraphIndex.
        # See _filter_revisions_touching_file_id in bzrlib/log.py
        # for more information.
        revids = []
        chunk_size = 1000
        for start in xrange(0, len(possible_keys), chunk_size):
            next_keys = possible_keys[start:start + chunk_size]
            revids += [k[1] for k in get_parent_map(next_keys)]
        del possible_keys, next_keys
        return revids

    def get_revision_history_since(self, revid_list, date):
        # if a user asks for revisions starting at 01-sep, they mean inclusive,
        # so start at midnight on 02-sep.
        date = date + datetime.timedelta(days=1)
        # our revid list is sorted in REVERSE date order,
        # so go thru some hoops here...
        revid_list.reverse()
        index = bisect.bisect(_RevListToTimestamps(revid_list,
                                                   self._branch.repository),
                              date)
        if index == 0:
            return []
        revid_list.reverse()
        index = -index
        return revid_list[index:]

    def get_search_revid_list(self, query, revid_list):
        """
        given a "quick-search" query, try a few obvious possible meanings:

            - revision id or # ("128.1.3")
            - date (US style "mm/dd/yy", earth style "dd-mm-yy", or \
iso style "yyyy-mm-dd")
            - comment text as a fallback

        and return a revid list that matches.
        """
            revnol = revno.split(".")
            revnos = ".".join(revnol[:-2])
            revnolast = int(revnol[-1])
            if d.has_key(revnos):
                m = d[revnos][0]
                if revnolast < m:
                    d[revnos] = (revnolast, revid)
            else:
                d[revnos] = (revnolast, revid)

        return [revid for (_, revid) in d.itervalues()]

    def add_branch_nicks(self, change):
        """
        given a 'change', fill in the branch nicks on all parents and merge
        points.
        """
        fetch_set = set()
        for p in change.parents:
            fetch_set.add(p.revid)
        for p in change.merge_points:
            fetch_set.add(p.revid)
        p_changes = self.get_changes(list(fetch_set))
        p_change_dict = dict([(c.revid, c) for c in p_changes])
        for p in change.parents:
            if p.revid in p_change_dict:
                p.branch_nick = p_change_dict[p.revid].branch_nick
            else:
                p.branch_nick = '(missing)'
        for p in change.merge_points:
            if p.revid in p_change_dict:
                p.branch_nick = p_change_dict[p.revid].branch_nick
            else:
                p.branch_nick = '(missing)'

    def get_changes(self, revid_list):
        """Return a list of changes objects for the given revids.
        """
        return [self._change_from_revision(rev) for rev in rev_list]

    def _get_deltas_for_revisions_with_trees(self, revisions):
        """Produce a list of revision deltas.

        Note that the input is a sequence of REVISIONS, not revision_ids.
        Trees will be held in memory until the generator exits.
        Each delta is relative to the revision's lefthand predecessor.
        (This is copied from bzrlib.)
        """
        required_trees = set()
        for revision in revisions:
            required_trees.add(revision.revid)
            required_trees.update([p.revid for p in revision.parents[:1]])
        trees = dict((t.get_revision_id(), t) for
                     t in self._branch.repository.revision_trees(required_trees))
        ret = []
        self._branch.repository.lock_read()
        try:
            for revision in revisions:
                if not revision.parents:
                    old_tree = self._branch.repository.revision_tree(
                        bzrlib.revision.NULL_REVISION)
                else:
                    old_tree = trees[revision.parents[0].revid]
                tree = trees[revision.revid]
                ret.append(tree.changes_from(old_tree))
            return ret
        finally:
            self._branch.repository.unlock()

    def _change_from_revision(self, revision):
        """
        Given a bzrlib Revision, return a processed "change" for use in
        templates.
        """
        message, short_message = clean_message(revision.message)

        tags = self._branch.tags.get_reverse_tag_dict()

        revtags = None
        if tags.has_key(revision.revision_id):
            revtags = ', '.join(tags[revision.revision_id])

        entry = {
            'revid': revision.revision_id,
            'date': datetime.datetime.fromtimestamp(revision.timestamp),
            'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
            'authors': revision.get_apparent_authors(),
            'branch_nick': revision.properties.get('branch-nick', None),
            'short_comment': short_message,
            'comment': revision.message,
            'comment_clean': [util.html_clean(s) for s in message],
            'parents': revision.parent_ids,
            'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
            'tags': revtags,
        }
        return util.Container(entry)

    def get_file_changes_uncached(self, entry):
        if entry.parents:
            old_revid = entry.parents[0].revid
        else:
            old_revid = bzrlib.revision.NULL_REVISION
        return self.file_changes_for_revision_ids(old_revid, entry.revid)

    def get_file_changes(self, entry):
        if self._file_change_cache is None:
            return self.get_file_changes_uncached(entry)
        else:
            return self._file_change_cache.get_file_changes(entry)

    def get_change_with_diff(self, revid, compare_revid=None):
        change = self.get_changes([revid])[0]

        if compare_revid is None:
            if change.parents:
                compare_revid = change.parents[0].revid
            else:
                compare_revid = 'null:'

        rev_tree1 = self._branch.repository.revision_tree(compare_revid)
        rev_tree2 = self._branch.repository.revision_tree(revid)
        delta = rev_tree2.changes_from(rev_tree1)

        change.changes = self.parse_delta(delta)
        change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)

        return change

    def add_changes(self, entry):
        changes = self.get_file_changes(entry)
        entry.changes = changes

    def get_file(self, file_id, revid):
        "returns (path, filename, data)"
        inv = self.get_inventory(revid)
        inv_entry = inv[file_id]
        rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
        path = inv.id2path(file_id)
        if not path.startswith('/'):
            path = '/' + path
        return path, inv_entry.name, rev_tree.get_file_text(file_id)

    def _parse_diffs(self, old_tree, new_tree, delta):
        """
        Return a list of processed diffs, in the format::

            chunks: list(
                diff: list(
                    old_lineno: int,
                    new_lineno: int,
                    type: str('context', 'delete', or 'insert'),
                    line: str,
                ),
            )
        """
        process = []
        out = []

        for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
            if text_modified:
                process.append((old_path, new_path, fid, kind))
        for path, fid, kind, text_modified, meta_modified in delta.modified:
            process.append((path, path, fid, kind))

        for old_path, new_path, fid, kind in process:
            old_lines = old_tree.get_file_lines(fid)
            new_lines = new_tree.get_file_lines(fid)
            buffer = StringIO()
            if old_lines != new_lines:
                try:
                    bzrlib.diff.internal_diff(old_path, old_lines,
                                              new_path, new_lines, buffer)
                except bzrlib.errors.BinaryFile:
                    diff = ''
                else:
                    diff = buffer.getvalue()
            else:
                diff = ''
            out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))

        return out

    def _process_diff(self, diff):
        # doesn't really need to be a method; could be static.
        chunks = []
        chunk = None
        for line in diff.splitlines():
            if len(line) == 0:
                continue
            if line.startswith('+++ ') or line.startswith('--- '):
                continue
            if line.startswith('@@ '):
                # new chunk
                if chunk is not None:
                    chunks.append(chunk)
                chunk = util.Container()
                chunk.diff = []
                lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
                old_lineno = lines[0]
                new_lineno = lines[1]
            elif line.startswith(' '):
                chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=new_lineno,
                                                 type='context', line=util.fixed_width(line[1:])))
                old_lineno += 1
                new_lineno += 1
            elif line.startswith('+'):
                chunk.diff.append(util.Container(old_lineno=None, new_lineno=new_lineno,
                                                 type='insert', line=util.fixed_width(line[1:])))
                new_lineno += 1
            elif line.startswith('-'):
                chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=None,
                                                 type='delete', line=util.fixed_width(line[1:])))
                old_lineno += 1
            else:
                chunk.diff.append(util.Container(old_lineno=None, new_lineno=None,
                                                 type='unknown', line=util.fixed_width(repr(line))))
        if chunk is not None:
            chunks.append(chunk)
        return chunks
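
    # Illustrative note (not part of the original module): for a hunk header
    # such as '@@ -12,7 +14,8 @@' (hypothetical values), line.split(' ')[1:3]
    # yields ['-12,7', '+14,8'], so the comprehension above sets old_lineno to
    # 12 and new_lineno to 14; the counters are then advanced as context,
    # insert and delete lines are consumed.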

    def parse_delta(self, delta):
        """
        Return a nested data structure containing the changes in a delta::

            added: list((filename, file_id)),
            renamed: list((old_filename, new_filename, file_id)),
            removed: list((filename, file_id)),
            modified: list(filename: str, file_id: str)
        """
        added = []
        modified = []
        renamed = []
        removed = []

        for path, fid, kind in delta.added:
            added.append((rich_filename(path, kind), fid))

        for path, fid, kind, text_modified, meta_modified in delta.modified:
            modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))

        for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
            renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
            if meta_modified or text_modified:
                modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))

        for path, fid, kind in delta.removed:
            removed.append((rich_filename(path, kind), fid))

        return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)

    @staticmethod
    def add_side_by_side(changes):
        # FIXME: this is a rotten API.
        for change in changes:
            for m in change.changes.modified:
                m.sbs_chunks = _make_side_by_side(m.chunks)

    def get_filelist(self, inv, file_id, sort_type=None):
        """
        return the list of all files (and their attributes) within a given
        path subtree.
        """
        dir_ie = inv[file_id]
        path = inv.id2path(file_id)
        file_list = []

        revid_set = set()
        for filename, entry in dir_ie.children.iteritems():
            revid_set.add(entry.revision)

        change_dict = {}
        for change in self.get_changes(list(revid_set)):
            change_dict[change.revid] = change

        for filename, entry in dir_ie.children.iteritems():
            pathname = filename
            if entry.kind == 'directory':
                pathname += '/'

            revid = entry.revision

            file = util.Container(
                filename=filename, executable=entry.executable, kind=entry.kind,
                pathname=pathname, file_id=entry.file_id, size=entry.text_size,
                revid=revid, change=change_dict[revid])
            file_list.append(file)

        if sort_type == 'filename' or sort_type is None:
            file_list.sort(key=lambda x: x.filename)
        elif sort_type == 'size':
            file_list.sort(key=lambda x: x.size)
        elif sort_type == 'date':
            file_list.sort(key=lambda x: x.change.date)

        parity = 0
        for file in file_list:
            file.parity = parity
            parity ^= 1

        return file_list

    _BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')

    def annotate_file(self, file_id, revid):
        z = time.time()
        lineno = 1
        parity = 0

        file_revid = self.get_inventory(revid)[file_id].revision
        tree = self._branch.repository.revision_tree(file_revid)
        revid_set = set()

        for line_revid, text in tree.annotate_iter(file_id):
            revid_set.add(line_revid)
            if self._BADCHARS_RE.match(text):
                # bail out; this isn't displayable text
                yield util.Container(parity=0, lineno=1, status='same',
                                     text='(This is a binary file.)',
                                     change=util.Container())
                return

        change_cache = dict([(c.revid, c)
                             for c in self.get_changes(list(revid_set))])

        last_line_revid = None
        for line_revid, text in tree.annotate_iter(file_id):
            if line_revid == last_line_revid:
                # remember which lines have a new revno and which don't
                status = 'same'
            else:
                status = 'changed'
                parity ^= 1
                last_line_revid = line_revid
                change = change_cache[line_revid]
                trunc_revno = change.revno
                if len(trunc_revno) > 10:
                    trunc_revno = trunc_revno[:9] + '...'

            yield util.Container(parity=parity, lineno=lineno, status=status,
                                 change=change, text=util.fixed_width(text))
            lineno += 1

        self.log.debug('annotate: %r secs' % (time.time() - z,))

    def get_bundle(self, revid, compare_revid=None):
        if compare_revid is None:
            parents = self._revision_graph[revid]
            if len(parents) > 0:
                compare_revid = parents[0]
        s = StringIO()
        bzrlib.bundle.serializer.write_bundle(self._branch.repository, revid, compare_revid, s)
        return s.getvalue()

    def file_changes_for_revision_ids(self, old_revid, new_revid):
        """
        Return a nested data structure containing the changes in a delta::

            added: list((filename, file_id)),
            renamed: list((old_filename, new_filename, file_id)),
            removed: list((filename, file_id)),
            modified: list(
                filename: str,
                file_id: str,
            ),
            text_changes: list((filename, file_id)),
        """
        repo = self._branch.repository
        if (bzrlib.revision.is_null(old_revid) or
            bzrlib.revision.is_null(new_revid)):
            old_tree, new_tree = map(
                repo.revision_tree, [old_revid, new_revid])
        else:
            old_tree, new_tree = repo.revision_trees([old_revid, new_revid])

        reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)

        bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)

        return util.Container(
            added=sorted(reporter.added, key=lambda x: x.filename),
            renamed=sorted(reporter.renamed, key=lambda x: x.new_filename),
            removed=sorted(reporter.removed, key=lambda x: x.filename),
            modified=sorted(reporter.modified, key=lambda x: x.filename),
            text_changes=sorted(reporter.text_changes, key=lambda x: x.filename))
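
    # Illustrative sketch (not part of the original module; revids are
    # hypothetical): a caller such as get_file_changes_uncached() does
    #
    #     changes = history.file_changes_for_revision_ids('old-revid',
    #                                                     'new-revid')
    #
    # and gets back a Container whose .added, .renamed, .removed, .modified
    # and .text_changes attributes are the sorted lists accumulated by
    # FileChangeReporter -- e.g. changes.text_changes[0] carries filename,
    # file_id, old_revision and new_revision for one file whose text differs.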