36
from StringIO import StringIO
38
from loggerhead import search
39
38
from loggerhead import util
40
from loggerhead.wholehistory import compute_whole_history_data
39
from loggerhead.util import decorator
43
42
import bzrlib.branch
43
import bzrlib.bundle.serializer
46
45
import bzrlib.errors
47
import bzrlib.lru_cache
48
46
import bzrlib.progress
49
47
import bzrlib.revision
50
import bzrlib.textfile
51
48
import bzrlib.tsort
52
with_branch_lock = util.with_lock('_lock', 'branch')
56
def with_bzrlib_read_lock(unbound):
    """Decorate a History method so the underlying branch repository is
    read-locked for the duration of the call.

    The wrapper acquires ``self._branch.repository.lock_read()`` before
    delegating to `unbound` and releases it afterwards, on both the
    success and the error path.
    """
    def bzrlib_read_locked(self, *args, **kw):
        self._branch.repository.lock_read()
        try:
            return unbound(self, *args, **kw)
        finally:
            # Without the try/finally, the unlock after `return` was
            # unreachable and the repository stayed read-locked.
            self._branch.repository.unlock()
    return bzrlib_read_locked
# bzrlib's UIFactory is not thread-safe
uihack = threading.local()


class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A SilentUIFactory that keeps its progress-bar stack in
    thread-local storage, since bzrlib's UIFactory is not thread-safe."""

    def nested_progress_bar(self):
        # Lazily create one DummyProgress stack per thread.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()

bzrlib.ui.ui_factory = ThreadSafeUIFactory()
80
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Pair up buffered delete/insert runs into side-by-side rows.

    Pads the shorter of `delete_list`/`insert_list` with empty context
    entries so both columns line up, then appends one Container per row
    to `line_list`.  Both buffers are consumed (emptied) in the process.
    """
    while len(delete_list) < len(insert_list):
        delete_list.append((None, '', 'context'))
    while len(insert_list) < len(delete_list):
        insert_list.append((None, '', 'context'))
    while len(delete_list) > 0:
        d = delete_list.pop(0)
        i = insert_list.pop(0)
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
93
def _make_side_by_side(chunk_list):
    """
    Turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  The new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                # A context line ends any pending delete/insert run, so
                # flush the buffers first.
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list,
                                                  insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(
                    old_lineno=line.old_lineno, new_lineno=line.new_lineno,
                    old_line=line.line, new_line=line.line,
                    old_type=line.type, new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
69
130
def is_branch(folder):
    """Return True if `folder` can be opened as a bzr branch."""
    try:
        bzrlib.branch.Branch.open(folder)
        return True
    except Exception:
        # Any failure to open is treated as "not a branch".
        return False
120
183
def __getitem__(self, index):
    """Get the date of the index'd item.

    Looks up the revision id at `index` of self.revid_list and returns
    its commit timestamp as a naive local datetime.
    """
    return datetime.datetime.fromtimestamp(
        self.repository.get_revision(self.revid_list[index]).timestamp)
125
187
def __len__(self):
    """Return the number of revisions in this timestamp view."""
    return len(self.revid_list)
128
class FileChangeReporter(object):
    """Collect per-file change information between two inventories.

    Implements the reporter interface consumed by
    bzrlib.delta.report_changes: each report() call sorts the change
    into the added/modified/renamed/removed/text_changes buckets.
    """

    def __init__(self, old_inv, new_inv):
        self.added = []
        self.modified = []
        self.renamed = []
        self.removed = []
        self.text_changes = []
        self.old_inv = old_inv
        self.new_inv = new_inv

    def revid(self, inv, file_id):
        """Return the revision that last touched `file_id` in `inv`,
        or 'null:' when the file is absent from that inventory."""
        try:
            return inv[file_id].revision
        except bzrlib.errors.NoSuchId:
            return 'null:'

    def report(self, file_id, paths, versioned, renamed, modified,
               exe_change, kind):
        # Record a text change for anything whose content actually changed.
        if modified not in ('unchanged', 'kind changed'):
            if versioned == 'removed':
                filename = rich_filename(paths[0], kind[0])
            else:
                filename = rich_filename(paths[1], kind[1])
            self.text_changes.append(util.Container(
                filename=filename, file_id=file_id,
                old_revision=self.revid(self.old_inv, file_id),
                new_revision=self.revid(self.new_inv, file_id)))
        if versioned == 'added':
            self.added.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id, kind=kind[1]))
        elif versioned == 'removed':
            self.removed.append(util.Container(
                filename=rich_filename(paths[0], kind),
                file_id=file_id, kind=kind[0]))
        elif renamed:
            self.renamed.append(util.Container(
                old_filename=rich_filename(paths[0], kind[0]),
                new_filename=rich_filename(paths[1], kind[1]),
                file_id=file_id,
                text_modified=modified == 'modified'))
        else:
            self.modified.append(util.Container(
                filename=rich_filename(paths[1], kind),
                file_id=file_id))
175
class RevInfoMemoryCache(object):
    """A store that validates values against the revids they were stored with.

    We use a unique key for each branch.

    The reason for not just using the revid as the key is so that when a new
    value is provided for a branch, we replace the old value used for the
    branch.

    There is another implementation of the same interface in
    loggerhead.changecache.RevInfoDiskCache.
    """

    def __init__(self, cache):
        # `cache` is any mapping supporting get() and __setitem__.
        self._cache = cache

    def get(self, key, revid):
        """Return the data associated with `key`, subject to a revid check.

        If a value was stored under `key`, with the same revid, return it.
        Otherwise return None.
        """
        cached = self._cache.get(key)
        if cached is None:
            return None
        stored_revid, data = cached
        if revid == stored_revid:
            return data
        else:
            return None

    def set(self, key, revid, data):
        """Store `data` under `key`, to be checked against `revid` on get().
        """
        self._cache[key] = (revid, data)
212
191
class History (object):
213
"""Decorate a branch to provide information for rendering.
215
History objects are expected to be short lived -- when serving a request
216
for a particular branch, open it, read-lock it, wrap a History object
217
around it, serve the request, throw the History object away, unlock the
218
branch and throw it away.
220
:ivar _file_change_cache: An object that caches information about the
221
files that changed between two revisions.
222
:ivar _rev_info: A list of information about revisions. This is by far
223
the most cryptic data structure in loggerhead. At the top level, it
224
is a list of 3-tuples [(merge-info, where-merged, parents)].
225
`merge-info` is (seq, revid, merge_depth, revno_str, end_of_merge) --
226
like a merged sorted list, but the revno is stringified.
227
`where-merged` is a tuple of revisions that have this revision as a
228
non-lefthand parent. Finally, `parents` is just the usual list of
229
parents of this revision.
230
:ivar _rev_indices: A dictionary mapping each revision id to the index of
231
the information about it in _rev_info.
232
:ivar _revno_revid: A dictionary mapping stringified revnos to revision
236
def _load_whole_history_data(self, caches, cache_key):
    """Set the attributes relating to the whole history of the branch.

    :param caches: a list of caches with interfaces like
        `RevInfoMemoryCache` and be ordered from fastest to slowest.
    :param cache_key: the key to use with the caches.
    """
    self._rev_indices = None
    self._rev_info = None

    missed_caches = []

    def update_missed_caches():
        # Back-fill every faster cache that missed with the value found.
        for cache in missed_caches:
            cache.set(cache_key, self.last_revid, self._rev_info)

    for cache in caches:
        data = cache.get(cache_key, self.last_revid)
        if data is not None:
            self._rev_info = data
            update_missed_caches()
            break
        else:
            missed_caches.append(cache)
    else:
        # No cache had it: recompute from the branch and fill all caches.
        whole_history_data = compute_whole_history_data(self._branch)
        self._rev_info, self._rev_indices = whole_history_data
        update_missed_caches()

    if self._rev_indices is not None:
        self._revno_revid = {}
        for ((_, revid, _, revno_str, _), _, _) in self._rev_info:
            self._revno_revid[revno_str] = revid
    else:
        # Cached data carries only _rev_info; rebuild both index maps.
        self._revno_revid = {}
        self._rev_indices = {}
        for ((seq, revid, _, revno_str, _), _, _) in self._rev_info:
            self._rev_indices[revid] = seq
            self._revno_revid[revno_str] = revid
274
def __init__(self, branch, whole_history_data_cache, file_cache=None,
             revinfo_disk_cache=None, cache_key=None):
    """Wrap a read-locked branch for rendering.

    :param branch: a read-locked bzrlib branch.
    :param whole_history_data_cache: dict-like store, wrapped in a
        RevInfoMemoryCache, used to memoize the whole-history data.
    :param file_cache: optional cache of per-revision file changes.
    :param revinfo_disk_cache: optional slower persistent cache consulted
        after the in-memory one.
    :param cache_key: key identifying this branch in the caches.
    """
    assert branch.is_locked(), (
        "Can only construct a History object with a read-locked branch.")
    if file_cache is not None:
        self._file_change_cache = file_cache
        file_cache.history = self
    else:
        self._file_change_cache = None
    self._branch = branch
    self._inventory_cache = {}
    self._branch_nick = self._branch.get_config().get_nickname()
    self.log = logging.getLogger('loggerhead.%s' % self._branch_nick)

    self.last_revid = branch.last_revision()

    # Fastest cache first; disk cache (if any) is consulted second.
    caches = [RevInfoMemoryCache(whole_history_data_cache)]
    if revinfo_disk_cache:
        caches.append(revinfo_disk_cache)
    self._load_whole_history_data(caches, cache_key)
240
def _strip_NULL_ghosts(revision_graph):
242
Copied over from bzrlib meant as a temporary workaround deprecated
246
# Filter ghosts, and null:
247
if bzrlib.revision.NULL_REVISION in revision_graph:
248
del revision_graph[bzrlib.revision.NULL_REVISION]
249
for key, parents in revision_graph.items():
250
revision_graph[key] = tuple(parent for parent in parents if parent
252
return revision_graph
255
def from_folder(cls, path):
256
b = bzrlib.branch.Branch.open(path)
259
return cls.from_branch(b)
264
def out_of_date(self):
265
# the branch may have been upgraded on disk, in which case we're stale.
266
newly_opened = bzrlib.branch.Branch.open(self._branch.base)
267
if self._branch.__class__ is not \
268
newly_opened.__class__:
270
if self._branch.repository.__class__ is not \
271
newly_opened.repository.__class__:
273
return self._branch.last_revision() != self._last_revid
275
def use_file_cache(self, cache):
276
self._file_change_cache = cache
296
279
def has_revisions(self):
297
280
return not bzrlib.revision.is_null(self.last_revid)
282
last_revid = property(lambda self: self._last_revid, None, None)
299
285
def get_config(self):
300
286
return self._branch.get_config()
302
288
def get_revno(self, revid):
    """Return the dotted revno string for `revid`, or 'unknown' when the
    revision is not part of the indexed history (e.g. a ghost parent)."""
    if revid not in self._rev_indices:
        # ghost parent?
        return 'unknown'
    seq = self._rev_indices[revid]
    # _rev_info[seq] is ((seq, revid, merge_depth, revno_str, eom), ...).
    revno = self._rev_info[seq][0][3]
    return revno
295
def get_revision_history(self):
296
return self._full_history
310
298
def get_revids_from(self, revid_list, start_revid):
332
318
if introduced_revisions(revid) & revid_set:
334
parents = self._rev_info[self._rev_indices[revid]][2]
320
parents = self._revision_graph[revid]
335
321
if len(parents) == 0:
337
323
revid = parents[0]
339
326
def get_short_revision_history_by_fileid(self, file_id):
327
# wow. is this really the only way we can get this list? by
328
# man-handling the weave store directly? :-0
340
329
# FIXME: would be awesome if we could get, for a folder, the list of
341
# revisions where items within that folder changed.i
342
possible_keys = [(file_id, revid) for revid in self._rev_indices]
343
get_parent_map = self._branch.repository.texts.get_parent_map
344
# We chunk the requests as this works better with GraphIndex.
345
# See _filter_revisions_touching_file_id in bzrlib/log.py
346
# for more information.
349
for start in xrange(0, len(possible_keys), chunk_size):
350
next_keys = possible_keys[start:start + chunk_size]
351
revids += [k[1] for k in get_parent_map(next_keys)]
352
del possible_keys, next_keys
330
# revisions where items within that folder changed.
331
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
332
w_revids = w.versions()
333
revids = [r for r in self._full_history if r in w_revids]
355
337
def get_revision_history_since(self, revid_list, date):
    """Return the subset of `revid_list` committed on or after `date`.

    `revid_list` must be sorted in reverse chronological order; it is
    reversed in place around the bisect and restored before returning.
    """
    # if a user asks for revisions starting at 01-sep, they mean inclusive,
    # so start at midnight on 02-sep.
    date = date + datetime.timedelta(days=1)
    # our revid list is sorted in REVERSE date order,
    # so go thru some hoops here...
    revid_list.reverse()
    index = bisect.bisect(_RevListToTimestamps(revid_list,
                                               self._branch.repository),
                          date)
    if index == 0:
        return []
    revid_list.reverse()
    index = -index
    return revid_list[index:]
371
351
def get_search_revid_list(self, query, revid_list):
373
353
given a "quick-search" query, try a few obvious possible meanings:
375
355
- revision id or # ("128.1.3")
376
- date (US style "mm/dd/yy", earth style "dd-mm-yy", or \
377
iso style "yyyy-mm-dd")
356
- date (US style "mm/dd/yy", earth style "dd-mm-yy", or iso style "yyyy-mm-dd")
378
357
- comment text as a fallback
380
359
and return a revid list that matches.
569
537
revnol = revno.split(".")
570
538
revnos = ".".join(revnol[:-2])
571
539
revnolast = int(revnol[-1])
572
if revnos in d.keys():
540
if d.has_key(revnos):
574
542
if revnolast < m:
575
d[revnos] = (revnolast, revid)
543
d[revnos] = ( revnolast, revid )
577
d[revnos] = (revnolast, revid)
579
return [d[revnos][1] for revnos in d.keys()]
581
def add_branch_nicks(self, change):
545
d[revnos] = ( revnolast, revid )
547
return [ d[revnos][1] for revnos in d.keys() ]
549
def get_branch_nicks(self, changes):
583
given a 'change', fill in the branch nicks on all parents and merge
551
given a list of changes from L{get_changes}, fill in the branch nicks
552
on all parents and merge points.
586
554
fetch_set = set()
587
for p in change.parents:
588
fetch_set.add(p.revid)
589
for p in change.merge_points:
590
fetch_set.add(p.revid)
555
for change in changes:
556
for p in change.parents:
557
fetch_set.add(p.revid)
558
for p in change.merge_points:
559
fetch_set.add(p.revid)
591
560
p_changes = self.get_changes(list(fetch_set))
592
561
p_change_dict = dict([(c.revid, c) for c in p_changes])
593
for p in change.parents:
594
if p.revid in p_change_dict:
595
p.branch_nick = p_change_dict[p.revid].branch_nick
597
p.branch_nick = '(missing)'
598
for p in change.merge_points:
599
if p.revid in p_change_dict:
600
p.branch_nick = p_change_dict[p.revid].branch_nick
602
p.branch_nick = '(missing)'
562
for change in changes:
563
# arch-converted branches may not have merged branch info :(
564
for p in change.parents:
565
if p.revid in p_change_dict:
566
p.branch_nick = p_change_dict[p.revid].branch_nick
568
p.branch_nick = '(missing)'
569
for p in change.merge_points:
570
if p.revid in p_change_dict:
571
p.branch_nick = p_change_dict[p.revid].branch_nick
573
p.branch_nick = '(missing)'
604
576
def get_changes(self, revid_list):
605
577
"""Return a list of changes objects for the given revids.
645
615
return [self._change_from_revision(rev) for rev in rev_list]
617
def _get_deltas_for_revisions_with_trees(self, revisions):
    """Produce a list of revision deltas.

    Note that the input is a sequence of REVISIONS, not revision_ids.
    Trees will be held in memory until the generator exits.
    Each delta is relative to the revision's lefthand predecessor.
    (This is copied from bzrlib.)
    """
    required_trees = set()
    for revision in revisions:
        required_trees.add(revision.revid)
        # Only the first (lefthand) parent's tree is needed per revision.
        required_trees.update([p.revid for p in revision.parents[:1]])
    trees = dict((t.get_revision_id(), t) for
                 t in self._branch.repository.revision_trees(required_trees))
    ret = []
    self._branch.repository.lock_read()
    try:
        for revision in revisions:
            if not revision.parents:
                # Root revision: diff against the empty tree.
                old_tree = self._branch.repository.revision_tree(
                    bzrlib.revision.NULL_REVISION)
            else:
                old_tree = trees[revision.parents[0].revid]
            tree = trees[revision.revid]
            ret.append(tree.changes_from(old_tree))
        return ret
    finally:
        self._branch.repository.unlock()
647
646
def _change_from_revision(self, revision):
649
648
Given a bzrlib Revision, return a processed "change" for use in
652
parents = [util.Container(revid=r,
653
revno=self.get_revno(r)) for r in revision.parent_ids]
651
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
653
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
655
655
message, short_message = clean_message(revision.message)
658
tags = self._branch.tags.get_reverse_tag_dict()
661
if tags.has_key(revision.revision_id):
662
revtags = ', '.join(tags[revision.revision_id])
665
658
'revid': revision.revision_id,
666
'date': datetime.datetime.fromtimestamp(revision.timestamp),
667
'utc_date': datetime.datetime.utcfromtimestamp(revision.timestamp),
668
'authors': revision.get_apparent_authors(),
660
'author': revision.committer,
669
661
'branch_nick': revision.properties.get('branch-nick', None),
670
662
'short_comment': short_message,
671
663
'comment': revision.message,
672
664
'comment_clean': [util.html_clean(s) for s in message],
673
665
'parents': revision.parent_ids,
674
'bugs': [bug.split()[0] for bug in revision.properties.get('bugs', '').splitlines()],
677
667
return util.Container(entry)
679
def get_file_changes_uncached(self, entry):
680
repo = self._branch.repository
682
old_revid = entry.parents[0].revid
684
old_revid = bzrlib.revision.NULL_REVISION
685
return self.file_changes_for_revision_ids(old_revid, entry.revid)
687
def get_file_changes(self, entry):
669
def get_file_changes_uncached(self, entries):
670
delta_list = self._get_deltas_for_revisions_with_trees(entries)
672
return [self.parse_delta(delta) for delta in delta_list]
675
def get_file_changes(self, entries):
688
676
if self._file_change_cache is None:
689
return self.get_file_changes_uncached(entry)
677
return self.get_file_changes_uncached(entries)
691
return self._file_change_cache.get_file_changes(entry)
693
def add_changes(self, entry):
694
changes = self.get_file_changes(entry)
695
entry.changes = changes
679
return self._file_change_cache.get_file_changes(entries)
681
def add_changes(self, entries):
682
changes_list = self.get_file_changes(entries)
684
for entry, changes in zip(entries, changes_list):
685
entry.changes = changes
688
def get_change_with_diff(self, revid, compare_revid=None):
689
change = self.get_changes([revid])[0]
691
if compare_revid is None:
693
compare_revid = change.parents[0].revid
695
compare_revid = 'null:'
697
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
698
rev_tree2 = self._branch.repository.revision_tree(revid)
699
delta = rev_tree2.changes_from(rev_tree1)
701
change.changes = self.parse_delta(delta)
702
change.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
697
707
def get_file(self, file_id, revid):
    "returns (path, filename, data)"
    inv = self.get_inventory(revid)
    inv_entry = inv[file_id]
    rev_tree = self._branch.repository.revision_tree(inv_entry.revision)
    path = inv.id2path(file_id)
    # Normalise to an absolute-style path for the templates.
    if not path.startswith('/'):
        path = '/' + path
    return path, inv_entry.name, rev_tree.get_file_text(file_id)
707
def file_changes_for_revision_ids(self, old_revid, new_revid):
717
def _parse_diffs(self, old_tree, new_tree, delta):
719
Return a list of processed diffs, in the format::
728
type: str('context', 'delete', or 'insert'),
737
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
739
process.append((old_path, new_path, fid, kind))
740
for path, fid, kind, text_modified, meta_modified in delta.modified:
741
process.append((path, path, fid, kind))
743
for old_path, new_path, fid, kind in process:
744
old_lines = old_tree.get_file_lines(fid)
745
new_lines = new_tree.get_file_lines(fid)
747
if old_lines != new_lines:
749
bzrlib.diff.internal_diff(old_path, old_lines,
750
new_path, new_lines, buffer)
751
except bzrlib.errors.BinaryFile:
754
diff = buffer.getvalue()
757
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff), raw_diff=diff))
761
def _process_diff(self, diff):
    """Parse unified-diff text into a list of chunk Containers.

    Each '@@' header starts a new chunk; each subsequent line becomes a
    Container with old/new line numbers and a type of 'context',
    'insert', 'delete' or 'unknown'.
    """
    # doesn't really need to be a method; could be static.
    chunks = []
    chunk = None
    for line in diff.splitlines():
        if len(line) == 0:
            continue
        if line.startswith('+++ ') or line.startswith('--- '):
            continue
        if line.startswith('@@ '):
            # new chunk
            if chunk is not None:
                chunks.append(chunk)
            chunk = util.Container()
            chunk.diff = []
            # '@@ -old,count +new,count @@' -> starting line numbers.
            lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
            old_lineno = lines[0]
            new_lineno = lines[1]
        elif line.startswith(' '):
            chunk.diff.append(util.Container(
                old_lineno=old_lineno, new_lineno=new_lineno,
                type='context', line=util.fixed_width(line[1:])))
            old_lineno += 1
            new_lineno += 1
        elif line.startswith('+'):
            chunk.diff.append(util.Container(
                old_lineno=None, new_lineno=new_lineno,
                type='insert', line=util.fixed_width(line[1:])))
            new_lineno += 1
        elif line.startswith('-'):
            chunk.diff.append(util.Container(
                old_lineno=old_lineno, new_lineno=None,
                type='delete', line=util.fixed_width(line[1:])))
            old_lineno += 1
        else:
            chunk.diff.append(util.Container(
                old_lineno=None, new_lineno=None,
                type='unknown', line=util.fixed_width(repr(line))))
    if chunk is not None:
        chunks.append(chunk)
    return chunks
799
def parse_delta(self, delta):
709
801
Return a nested data structure containing the changes in a delta::
718
text_changes: list((filename, file_id)),
720
repo = self._branch.repository
721
if bzrlib.revision.is_null(old_revid) or \
722
bzrlib.revision.is_null(new_revid):
723
old_tree, new_tree = map(
724
repo.revision_tree, [old_revid, new_revid])
726
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
728
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
730
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
732
return util.Container(
733
added=sorted(reporter.added, key=lambda x:x.filename),
734
renamed=sorted(reporter.renamed, key=lambda x:x.new_filename),
735
removed=sorted(reporter.removed, key=lambda x:x.filename),
736
modified=sorted(reporter.modified, key=lambda x:x.filename),
737
text_changes=sorted(reporter.text_changes, key=lambda x:x.filename))
816
for path, fid, kind in delta.added:
817
added.append((rich_filename(path, kind), fid))
819
for path, fid, kind, text_modified, meta_modified in delta.modified:
820
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
822
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
823
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
824
if meta_modified or text_modified:
825
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
827
for path, fid, kind in delta.removed:
828
removed.append((rich_filename(path, kind), fid))
830
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
833
@staticmethod
def add_side_by_side(changes):
    """Attach side-by-side chunk lists to every modified file in `changes`.

    NOTE(review): no `self` parameter here, so this must be a staticmethod
    on History -- decorator restored from that signature; confirm against
    upstream.
    """
    # FIXME: this is a rotten API.
    for change in changes:
        for m in change.changes.modified:
            m.sbs_chunks = _make_side_by_side(m.chunks)
840
def get_filelist(self, inv, file_id, sort_type=None):
842
return the list of all files (and their attributes) within a given
846
dir_ie = inv[file_id]
847
path = inv.id2path(file_id)
852
for filename, entry in dir_ie.children.iteritems():
853
revid_set.add(entry.revision)
856
for change in self.get_changes(list(revid_set)):
857
change_dict[change.revid] = change
859
for filename, entry in dir_ie.children.iteritems():
861
if entry.kind == 'directory':
864
revid = entry.revision
866
file = util.Container(
867
filename=filename, executable=entry.executable, kind=entry.kind,
868
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
869
revid=revid, change=change_dict[revid])
870
file_list.append(file)
872
if sort_type == 'filename' or sort_type is None:
873
file_list.sort(key=lambda x: x.filename)
874
elif sort_type == 'size':
875
file_list.sort(key=lambda x: x.size)
876
elif sort_type == 'date':
877
file_list.sort(key=lambda x: x.change.date)
880
for file in file_list:
887
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
890
def annotate_file(self, file_id, revid):
895
file_revid = self.get_inventory(revid)[file_id].revision
897
tree = self._branch.repository.revision_tree(file_revid)
900
for line_revid, text in tree.annotate_iter(file_id):
901
revid_set.add(line_revid)
902
if self._BADCHARS_RE.match(text):
903
# bail out; this isn't displayable text
904
yield util.Container(parity=0, lineno=1, status='same',
905
text='(This is a binary file.)',
906
change=util.Container())
908
change_cache = dict([(c.revid, c) \
909
for c in self.get_changes(list(revid_set))])
911
last_line_revid = None
912
for line_revid, text in tree.annotate_iter(file_id):
913
if line_revid == last_line_revid:
914
# remember which lines have a new revno and which don't
919
last_line_revid = line_revid
920
change = change_cache[line_revid]
921
trunc_revno = change.revno
922
if len(trunc_revno) > 10:
923
trunc_revno = trunc_revno[:9] + '...'
925
yield util.Container(parity=parity, lineno=lineno, status=status,
926
change=change, text=util.fixed_width(text))
929
self.log.debug('annotate: %r secs' % (time.time() - z,))
932
def get_bundle(self, revid, compare_revid=None):
933
if compare_revid is None:
934
parents = self._revision_graph[revid]
936
compare_revid = parents[0]
940
bzrlib.bundle.serializer.write_bundle(self._branch.repository, revid, compare_revid, s)