53
56
import bzrlib.tsort
60
# Decorator used below to serialize branch access through the `_lock` attribute.
with_branch_lock = util.with_lock('_lock', 'branch')

# bzrlib's UIFactory is not thread-safe; keep per-thread state in a
# thread-local so concurrent requests don't share a progress-bar stack.
uihack = threading.local()
60
66
class ThreadSafeUIFactory (bzrlib.ui.SilentUIFactory):
    """A silent UI factory whose progress-bar stack lives in thread-local
    storage, so it can be shared safely across request threads."""

    def nested_progress_bar(self):
        # Lazily build one dummy-progress stack per thread on first use.
        if getattr(uihack, '_progress_bar_stack', None) is None:
            uihack._progress_bar_stack = bzrlib.progress.ProgressBarStack(
                klass=bzrlib.progress.DummyProgress)
        return uihack._progress_bar_stack.get_nested()

bzrlib.ui.ui_factory = ThreadSafeUIFactory()
75
def _process_side_by_side_buffers(line_list, delete_list, insert_list):
    """Flush buffered delete/insert lines into *line_list* as paired rows.

    The shorter buffer is padded with blank 'context' entries so the two
    columns line up one-to-one.  Both buffers are emptied on return, as
    callers rely on that to reset their state.
    """
    filler = (None, '', 'context')
    while len(delete_list) < len(insert_list):
        delete_list.append(filler)
    while len(insert_list) < len(delete_list):
        insert_list.append(filler)
    # Pair rows front-to-front.  (The original popped index 0 in a while
    # loop, which is O(n^2) because list.pop(0) shifts every element; a
    # single zip pass visits the same pairs in the same order.)
    for d, i in zip(delete_list, insert_list):
        line_list.append(util.Container(old_lineno=d[0], new_lineno=i[0],
                                        old_line=d[1], new_line=i[1],
                                        old_type=d[2], new_type=i[2]))
    # Preserve the original contract that both buffers come back empty.
    del delete_list[:]
    del insert_list[:]
88
def _make_side_by_side(chunk_list):
    """turn a normal unified-style diff (post-processed by parse_delta) into a
    side-by-side diff structure.  the new structure is::

        chunks: list(
            diff: list(
                old_lineno: int,
                new_lineno: int,
                old_line: str,
                new_line: str,
                type: str('context' or 'changed'),
            )
        )
    """
    out_chunk_list = []
    for chunk in chunk_list:
        line_list = []
        delete_list, insert_list = [], []
        for line in chunk.diff:
            if line.type == 'context':
                # A context line closes any pending run of deletes/inserts.
                if len(delete_list) or len(insert_list):
                    _process_side_by_side_buffers(line_list, delete_list, insert_list)
                    delete_list, insert_list = [], []
                line_list.append(util.Container(
                    old_lineno=line.old_lineno, new_lineno=line.new_lineno,
                    old_line=line.line, new_line=line.line,
                    old_type=line.type, new_type=line.type))
            elif line.type == 'delete':
                delete_list.append((line.old_lineno, line.line, line.type))
            elif line.type == 'insert':
                insert_list.append((line.new_lineno, line.line, line.type))
        # Flush whatever remained buffered at the end of the chunk.
        if len(delete_list) or len(insert_list):
            _process_side_by_side_buffers(line_list, delete_list, insert_list)
        out_chunk_list.append(util.Container(diff=line_list))
    return out_chunk_list
71
125
def is_branch(folder):
73
127
bzrlib.branch.Branch.open(folder)
125
178
def __getitem__(self, index):
    """Get the date of the index'd item"""
    # Look up the revision's commit timestamp and convert it to a datetime.
    rev = self.repository.get_revision(self.revid_list[index])
    return datetime.datetime.fromtimestamp(rev.timestamp)
130
182
def __len__(self):
    """Return how many revision ids are in the list."""
    return len(self.revid_list)
133
class FileChangeReporter(object):
134
def __init__(self, old_inv, new_inv):
139
self.text_changes = []
140
self.old_inv = old_inv
141
self.new_inv = new_inv
143
def revid(self, inv, file_id):
145
return inv[file_id].revision
146
except bzrlib.errors.NoSuchId:
149
def report(self, file_id, paths, versioned, renamed, modified,
151
if modified not in ('unchanged', 'kind changed'):
152
if versioned == 'removed':
153
filename = rich_filename(paths[0], kind[0])
155
filename = rich_filename(paths[1], kind[1])
156
self.text_changes.append(util.Container(
157
filename=filename, file_id=file_id,
158
old_revision=self.revid(self.old_inv, file_id),
159
new_revision=self.revid(self.new_inv, file_id)))
160
if versioned == 'added':
161
self.added.append(util.Container(
162
filename=rich_filename(paths[1], kind),
163
file_id=file_id, kind=kind[1]))
164
elif versioned == 'removed':
165
self.removed.append(util.Container(
166
filename=rich_filename(paths[0], kind),
167
file_id=file_id, kind=kind[0]))
169
self.renamed.append(util.Container(
170
old_filename=rich_filename(paths[0], kind[0]),
171
new_filename=rich_filename(paths[1], kind[1]),
173
text_modified=modified == 'modified'))
175
self.modified.append(util.Container(
176
filename=rich_filename(paths[1], kind),
180
186
class History (object):
181
"""Decorate a branch to provide information for rendering.
183
History objects are expected to be short lived -- when serving a request
184
for a particular branch, open it, read-lock it, wrap a History object
185
around it, serve the request, throw the History object away, unlock the
186
branch and throw it away.
188
:ivar _file_change_cache: xx
191
def __init__(self, branch, whole_history_data_cache):
192
assert branch.is_locked(), (
193
"Can only construct a History object with a read-locked branch.")
189
self._change_cache = None
194
190
self._file_change_cache = None
192
self._lock = threading.RLock()
195
def from_branch(cls, branch, name=None):
195
198
self._branch = branch
196
self._inventory_cache = {}
197
self._branch_nick = self._branch.get_config().get_nickname()
198
self.log = logging.getLogger('loggerhead.%s' % self._branch_nick)
200
self.last_revid = branch.last_revision()
202
whole_history_data = whole_history_data_cache.get(self.last_revid)
203
if whole_history_data is None:
204
whole_history_data = compute_whole_history_data(branch)
205
whole_history_data_cache[self.last_revid] = whole_history_data
207
(self._revision_graph, self._full_history, self._revision_info,
208
self._revno_revid, self._merge_sort, self._where_merged,
209
) = whole_history_data
199
self._last_revid = self._branch.last_revision()
200
if self._last_revid is not None:
201
self._revision_graph = branch.repository.get_revision_graph(self._last_revid)
203
self._revision_graph = {}
206
name = self._branch.nick
208
self.log = logging.getLogger('loggerhead.%s' % (name,))
210
self._full_history = []
211
self._revision_info = {}
212
self._revno_revid = {}
213
self._merge_sort = bzrlib.tsort.merge_sort(self._revision_graph, self._last_revid, generate_revno=True)
214
for (seq, revid, merge_depth, revno, end_of_merge) in self._merge_sort:
215
self._full_history.append(revid)
216
revno_str = '.'.join(str(n) for n in revno)
217
self._revno_revid[revno_str] = revid
218
self._revision_info[revid] = (seq, revid, merge_depth, revno_str, end_of_merge)
221
self._where_merged = {}
222
for revid in self._revision_graph.keys():
223
if not revid in self._full_history:
225
for parent in self._revision_graph[revid]:
226
self._where_merged.setdefault(parent, set()).add(revid)
228
self.log.info('built revision graph cache: %r secs' % (time.time() - z,))
232
def from_folder(cls, path, name=None):
233
b = bzrlib.branch.Branch.open(path)
234
return cls.from_branch(b, name)
237
def out_of_date(self):
238
# the branch may have been upgraded on disk, in which case we're stale.
239
if self._branch.__class__ is not \
240
bzrlib.branch.Branch.open(self._branch.base).__class__:
242
return self._branch.last_revision() != self._last_revid
244
def use_cache(self, cache):
245
self._change_cache = cache
211
247
def use_file_cache(self, cache):
212
248
self._file_change_cache = cache
215
def has_revisions(self):
216
return not bzrlib.revision.is_null(self.last_revid)
250
def use_search_index(self, index):
255
# called when a new history object needs to be created, because the
256
# branch history has changed. we need to immediately close and stop
257
# using our caches, because a new history object will be created to
258
# replace us, using the same cache files.
259
# (may also be called during server shutdown.)
260
if self._change_cache is not None:
261
self._change_cache.close()
262
self._change_cache = None
263
if self._index is not None:
267
def flush_cache(self):
268
if self._change_cache is None:
270
self._change_cache.flush()
272
def check_rebuild(self):
273
if self._change_cache is not None:
274
self._change_cache.check_rebuild()
275
if self._index is not None:
276
self._index.check_rebuild()
278
last_revid = property(lambda self: self._last_revid, None, None)
218
281
def get_config(self):
219
282
return self._branch.get_config()
222
285
if revid not in self._revision_info:
225
(seq, revid, merge_depth,
226
revno_str, end_of_merge) = self._revision_info[revid]
288
seq, revid, merge_depth, revno_str, end_of_merge = self._revision_info[revid]
229
def get_revids_from(self, revid_list, start_revid):
231
Yield the mainline (wrt start_revid) revisions that merged each
234
if revid_list is None:
235
revid_list = self._full_history
236
revid_set = set(revid_list)
291
def get_revision_history(self):
292
return self._full_history
239
def introduced_revisions(revid):
241
seq, revid, md, revno, end_of_merge = self._revision_info[revid]
243
while i < len(self._merge_sort) and self._merge_sort[i][2] > md:
244
r.add(self._merge_sort[i][1])
248
if bzrlib.revision.is_null(revid):
250
if introduced_revisions(revid) & revid_set:
294
def get_revids_from(self, revid_list, revid):
296
given a list of revision ids, yield revisions in graph order,
297
starting from revid. the list can be None if you just want to travel
298
across all revisions.
301
if (revid_list is None) or (revid in revid_list):
303
if not self._revision_graph.has_key(revid):
252
305
parents = self._revision_graph[revid]
253
306
if len(parents) == 0:
255
308
revid = parents[0]
257
311
def get_short_revision_history_by_fileid(self, file_id):
312
# wow. is this really the only way we can get this list? by
313
# man-handling the weave store directly? :-0
258
314
# FIXME: would be awesome if we could get, for a folder, the list of
259
# revisions where items within that folder changed.i
261
# FIXME: Workaround for bzr versions prior to 1.6b3.
262
# Remove me eventually pretty please :)
263
w = self._branch.repository.weave_store.get_weave(
264
file_id, self._branch.repository.get_transaction())
265
w_revids = w.versions()
266
revids = [r for r in self._full_history if r in w_revids]
267
except AttributeError:
268
possible_keys = [(file_id, revid) for revid in self._full_history]
269
get_parent_map = self._branch.repository.texts.get_parent_map
270
# We chunk the requests as this works better with GraphIndex.
271
# See _filter_revisions_touching_file_id in bzrlib/log.py
272
# for more information.
275
for start in xrange(0, len(possible_keys), chunk_size):
276
next_keys = possible_keys[start:start + chunk_size]
277
revids += [k[1] for k in get_parent_map(next_keys)]
278
del possible_keys, next_keys
315
# revisions where items within that folder changed.
316
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
317
w_revids = w.versions()
318
revids = [r for r in self._full_history if r in w_revids]
281
322
def get_revision_history_since(self, revid_list, date):
282
323
# if a user asks for revisions starting at 01-sep, they mean inclusive,
283
324
# so start at midnight on 02-sep.
284
325
date = date + datetime.timedelta(days=1)
285
# our revid list is sorted in REVERSE date order,
286
# so go thru some hoops here...
326
# our revid list is sorted in REVERSE date order, so go thru some hoops here...
287
327
revid_list.reverse()
288
index = bisect.bisect(_RevListToTimestamps(revid_list,
289
self._branch.repository),
328
index = bisect.bisect(_RevListToTimestamps(revid_list, self._branch.repository), date)
293
331
revid_list.reverse()
295
333
return revid_list[index:]
336
def get_revision_history_matching(self, revid_list, text):
337
self.log.debug('searching %d revisions for %r', len(revid_list), text)
339
# this is going to be painfully slow. :(
342
for revid in revid_list:
343
change = self.get_changes([ revid ])[0]
344
if text in change.comment.lower():
346
self.log.debug('searched %d revisions for %r in %r secs', len(revid_list), text, time.time() - z)
349
def get_revision_history_matching_indexed(self, revid_list, text):
350
self.log.debug('searching %d revisions for %r', len(revid_list), text)
352
if self._index is None:
353
return self.get_revision_history_matching(revid_list, text)
354
out = self._index.find(text, revid_list)
355
self.log.debug('searched %d revisions for %r in %r secs: %d results', len(revid_list), text, time.time() - z, len(out))
356
# put them in some coherent order :)
357
out = [r for r in self._full_history if r in out]
297
361
def get_search_revid_list(self, query, revid_list):
299
363
given a "quick-search" query, try a few obvious possible meanings:
301
365
- revision id or # ("128.1.3")
302
- date (US style "mm/dd/yy", earth style "dd-mm-yy", or \
303
iso style "yyyy-mm-dd")
366
- date (US style "mm/dd/yy", earth style "dd-mm-yy", or iso style "yyyy-mm-dd")
304
367
- comment text as a fallback
306
369
and return a revid list that matches.
309
372
# all the relevant changes (time-consuming) only to return a list of
310
373
# revids which will be used to fetch a set of changes again.
312
# if they entered a revid, just jump straight there;
313
# ignore the passed-in revid_list
375
# if they entered a revid, just jump straight there; ignore the passed-in revid_list
314
376
revid = self.fix_revid(query)
315
377
if revid is not None:
316
378
if isinstance(revid, unicode):
317
379
revid = revid.encode('utf-8')
318
changes = self.get_changes([revid])
380
changes = self.get_changes([ revid ])
319
381
if (changes is not None) and (len(changes) > 0):
323
385
m = self.us_date_re.match(query)
324
386
if m is not None:
325
date = datetime.datetime(util.fix_year(int(m.group(3))),
387
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(1)), int(m.group(2)))
329
389
m = self.earth_date_re.match(query)
330
390
if m is not None:
331
date = datetime.datetime(util.fix_year(int(m.group(3))),
391
date = datetime.datetime(util.fix_year(int(m.group(3))), int(m.group(2)), int(m.group(1)))
335
393
m = self.iso_date_re.match(query)
336
394
if m is not None:
337
date = datetime.datetime(util.fix_year(int(m.group(1))),
395
date = datetime.datetime(util.fix_year(int(m.group(1))), int(m.group(2)), int(m.group(3)))
340
396
if date is not None:
341
397
if revid_list is None:
342
# if no limit to the query was given,
343
# search only the direct-parent path.
344
revid_list = list(self.get_revids_from(None, self.last_revid))
398
# if no limit to the query was given, search only the direct-parent path.
399
revid_list = list(self.get_revids_from(None, self._last_revid))
345
400
return self.get_revision_history_since(revid_list, date)
402
# check comment fields.
403
if revid_list is None:
404
revid_list = self._full_history
405
return self.get_revision_history_matching_indexed(revid_list, query)
347
407
revno_re = re.compile(r'^[\d\.]+$')
348
408
# the date regex are without a final '$' so that queries like
349
409
# "2006-11-30 12:15" still mostly work. (i think it's better to give
384
442
revlist = list(self.get_revids_from(None, revid))
387
446
def get_view(self, revid, start_revid, file_id, query=None):
389
448
use the URL parameters (revid, start_revid, file_id, and query) to
390
449
determine the revision list we're viewing (start_revid, file_id, query)
391
450
and where we are in it (revid).
393
- if a query is given, we're viewing query results.
394
- if a file_id is given, we're viewing revisions for a specific
396
- if a start_revid is given, we're viewing the branch from a
397
specific revision up the tree.
399
these may be combined to view revisions for a specific file, from
400
a specific revision, with a specific search query.
402
returns a new (revid, start_revid, revid_list) where:
452
if a query is given, we're viewing query results.
453
if a file_id is given, we're viewing revisions for a specific file.
454
if a start_revid is given, we're viewing the branch from a
455
specific revision up the tree.
456
(these may be combined to view revisions for a specific file, from
457
a specific revision, with a specific search query.)
459
returns a new (revid, start_revid, revid_list, scan_list) where:
404
461
- revid: current position within the view
405
462
- start_revid: starting revision of this view
427
484
revid_list = self.get_file_view(start_revid, file_id)
429
486
revid_list = None
430
revid_list = search.search_revisions(self._branch, query)
431
if revid_list and len(revid_list) > 0:
488
revid_list = self.get_search_revid_list(query, revid_list)
489
if len(revid_list) > 0:
432
490
if revid not in revid_list:
433
491
revid = revid_list[0]
434
492
return revid, start_revid, revid_list
436
# XXX: This should return a message saying that the search could
437
# not be completed due to either missing the plugin or missing a
439
495
return None, None, []
441
498
def get_inventory(self, revid):
442
if revid not in self._inventory_cache:
443
self._inventory_cache[revid] = (
444
self._branch.repository.get_revision_inventory(revid))
445
return self._inventory_cache[revid]
499
return self._branch.repository.get_revision_inventory(revid)
447
502
def get_path(self, revid, file_id):
448
503
if (file_id is None) or (file_id == ''):
450
path = self.get_inventory(revid).id2path(file_id)
505
path = self._branch.repository.get_revision_inventory(revid).id2path(file_id)
451
506
if (len(path) > 0) and not path.startswith('/'):
452
507
path = '/' + path
455
511
def get_file_id(self, revid, path):
456
512
if (len(path) > 0) and not path.startswith('/'):
457
513
path = '/' + path
458
return self.get_inventory(revid).path2id(path)
514
return self._branch.repository.get_revision_inventory(revid).path2id(path)
460
517
def get_merge_point_list(self, revid):
495
552
revnol = revno.split(".")
496
553
revnos = ".".join(revnol[:-2])
497
554
revnolast = int(revnol[-1])
498
if revnos in d.keys():
555
if d.has_key(revnos):
500
557
if revnolast < m:
501
d[revnos] = (revnolast, revid)
558
d[revnos] = ( revnolast, revid )
503
d[revnos] = (revnolast, revid)
505
return [d[revnos][1] for revnos in d.keys()]
507
def add_branch_nicks(self, change):
560
d[revnos] = ( revnolast, revid )
562
return [ d[revnos][1] for revnos in d.keys() ]
564
def get_branch_nicks(self, changes):
509
given a 'change', fill in the branch nicks on all parents and merge
566
given a list of changes from L{get_changes}, fill in the branch nicks
567
on all parents and merge points.
512
569
fetch_set = set()
513
for p in change.parents:
514
fetch_set.add(p.revid)
515
for p in change.merge_points:
516
fetch_set.add(p.revid)
570
for change in changes:
571
for p in change.parents:
572
fetch_set.add(p.revid)
573
for p in change.merge_points:
574
fetch_set.add(p.revid)
517
575
p_changes = self.get_changes(list(fetch_set))
518
576
p_change_dict = dict([(c.revid, c) for c in p_changes])
519
for p in change.parents:
520
if p.revid in p_change_dict:
521
p.branch_nick = p_change_dict[p.revid].branch_nick
523
p.branch_nick = '(missing)'
524
for p in change.merge_points:
525
if p.revid in p_change_dict:
526
p.branch_nick = p_change_dict[p.revid].branch_nick
528
p.branch_nick = '(missing)'
577
for change in changes:
578
# arch-converted branches may not have merged branch info :(
579
for p in change.parents:
580
if p.revid in p_change_dict:
581
p.branch_nick = p_change_dict[p.revid].branch_nick
583
p.branch_nick = '(missing)'
584
for p in change.merge_points:
585
if p.revid in p_change_dict:
586
p.branch_nick = p_change_dict[p.revid].branch_nick
588
p.branch_nick = '(missing)'
530
591
def get_changes(self, revid_list):
531
"""Return a list of changes objects for the given revids.
533
Revisions not present and NULL_REVISION will be ignored.
535
changes = self.get_changes_uncached(revid_list)
592
if self._change_cache is None:
593
changes = self.get_changes_uncached(revid_list)
595
changes = self._change_cache.get_changes(revid_list)
536
596
if len(changes) == 0:
539
599
# some data needs to be recalculated each time, because it may
540
600
# change as new revisions are added.
541
601
for change in changes:
542
merge_revids = self.simplify_merge_point_list(
543
self.get_merge_point_list(change.revid))
544
change.merge_points = [
545
util.Container(revid=r,
546
revno=self.get_revno(r)) for r in merge_revids]
547
if len(change.parents) > 0:
548
change.parents = [util.Container(revid=r,
549
revno=self.get_revno(r)) for r in change.parents]
602
merge_revids = self.simplify_merge_point_list(self.get_merge_point_list(change.revid))
603
change.merge_points = [util.Container(revid=r, revno=self.get_revno(r)) for r in merge_revids]
550
604
change.revno = self.get_revno(change.revid)
559
def get_changes_uncached(self, revid_list):
560
# FIXME: deprecated method in getting a null revision
561
revid_list = filter(lambda revid: not bzrlib.revision.is_null(revid),
563
parent_map = self._branch.repository.get_graph().get_parent_map(
565
# We need to return the answer in the same order as the input,
567
present_revids = [revid for revid in revid_list
568
if revid in parent_map]
569
rev_list = self._branch.repository.get_revisions(present_revids)
571
return [self._change_from_revision(rev) for rev in rev_list]
573
def _change_from_revision(self, revision):
575
Given a bzrlib Revision, return a processed "change" for use in
613
# alright, let's profile this sucka.
614
def _get_changes_profiled(self, revid_list, get_diffs=False):
615
from loggerhead.lsprof import profile
617
ret, stats = profile(self.get_changes_uncached, revid_list, get_diffs)
620
cPickle.dump(stats, open('lsprof.stats', 'w'), 2)
621
self.log.info('lsprof complete!')
624
def _get_deltas_for_revisions_with_trees(self, entries):
625
"""Produce a generator of revision deltas.
627
Note that the input is a sequence of REVISIONS, not revision_ids.
628
Trees will be held in memory until the generator exits.
629
Each delta is relative to the revision's lefthand predecessor.
631
required_trees = set()
632
for entry in entries:
633
required_trees.add(entry.revid)
634
required_trees.update([p.revid for p in entry.parents[:1]])
635
trees = dict((t.get_revision_id(), t) for
636
t in self._branch.repository.revision_trees(required_trees))
638
self._branch.repository.lock_read()
640
for entry in entries:
641
if not entry.parents:
642
old_tree = self._branch.repository.revision_tree(
643
bzrlib.revision.NULL_REVISION)
645
old_tree = trees[entry.parents[0].revid]
646
tree = trees[entry.revid]
647
ret.append(tree.changes_from(old_tree))
650
self._branch.repository.unlock()
652
def entry_from_revision(self, revision):
578
653
commit_time = datetime.datetime.fromtimestamp(revision.timestamp)
580
parents = [util.Container(revid=r,
581
revno=self.get_revno(r)) for r in revision.parent_ids]
655
parents = [util.Container(revid=r, revno=self.get_revno(r)) for r in revision.parent_ids]
583
657
message, short_message = clean_message(revision.message)
586
authors = revision.get_apparent_authors()
587
except AttributeError:
588
authors = [revision.get_apparent_author()]
591
660
'revid': revision.revision_id,
592
661
'date': commit_time,
662
'author': revision.committer,
594
663
'branch_nick': revision.properties.get('branch-nick', None),
595
664
'short_comment': short_message,
596
665
'comment': revision.message,
597
666
'comment_clean': [util.html_clean(s) for s in message],
598
'parents': revision.parent_ids,
600
669
return util.Container(entry)
602
def get_file_changes_uncached(self, entry):
603
repo = self._branch.repository
605
old_revid = entry.parents[0].revid
607
old_revid = bzrlib.revision.NULL_REVISION
608
return self.file_changes_for_revision_ids(old_revid, entry.revid)
610
def get_file_changes(self, entry):
672
def get_changes_uncached(self, revid_list):
673
# Because we may loop and call get_revisions multiple times (to throw
674
# out dud revids), we grab a read lock.
675
self._branch.lock_read()
679
rev_list = self._branch.repository.get_revisions(revid_list)
680
except (KeyError, bzrlib.errors.NoSuchRevision), e:
681
# this sometimes happens with arch-converted branches.
682
# i don't know why. :(
683
self.log.debug('No such revision (skipping): %s', e)
684
revid_list.remove(e.revision)
688
return [self.entry_from_revision(rev) for rev in rev_list]
690
self._branch.unlock()
692
def get_file_changes_uncached(self, entries):
693
delta_list = self._get_deltas_for_revisions_with_trees(entries)
695
return [self.parse_delta(delta) for delta in delta_list]
698
def get_file_changes(self, entries):
611
699
if self._file_change_cache is None:
612
return self.get_file_changes_uncached(entry)
700
return self.get_file_changes_uncached(entries)
614
return self._file_change_cache.get_file_changes(entry)
616
def add_changes(self, entry):
617
changes = self.get_file_changes(entry)
618
entry.changes = changes
702
return self._file_change_cache.get_file_changes(entries)
704
def add_changes(self, entries):
705
changes_list = self.get_file_changes(entries)
707
for entry, changes in zip(entries, changes_list):
708
entry.changes = changes
711
def get_change_with_diff(self, revid, compare_revid=None):
712
entry = self.get_changes([revid])[0]
714
if compare_revid is None:
716
compare_revid = entry.parents[0].revid
718
compare_revid = 'null:'
720
rev_tree1 = self._branch.repository.revision_tree(compare_revid)
721
rev_tree2 = self._branch.repository.revision_tree(revid)
722
delta = rev_tree2.changes_from(rev_tree1)
724
entry.changes = self.parse_delta(delta)
726
entry.changes.modified = self._parse_diffs(rev_tree1, rev_tree2, delta)
620
731
def get_file(self, file_id, revid):
621
732
"returns (path, filename, data)"
622
733
inv = self.get_inventory(revid)
627
738
path = '/' + path
628
739
return path, inv_entry.name, rev_tree.get_file_text(file_id)
630
def file_changes_for_revision_ids(self, old_revid, new_revid):
741
def _parse_diffs(self, old_tree, new_tree, delta):
743
Return a list of processed diffs, in the format::
752
type: str('context', 'delete', or 'insert'),
761
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
763
process.append((old_path, new_path, fid, kind))
764
for path, fid, kind, text_modified, meta_modified in delta.modified:
765
process.append((path, path, fid, kind))
767
for old_path, new_path, fid, kind in process:
768
old_lines = old_tree.get_file_lines(fid)
769
new_lines = new_tree.get_file_lines(fid)
771
if old_lines != new_lines:
773
bzrlib.diff.internal_diff(old_path, old_lines,
774
new_path, new_lines, buffer)
775
except bzrlib.errors.BinaryFile:
778
diff = buffer.getvalue()
781
out.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid, chunks=self._process_diff(diff)))
785
def _process_diff(self, diff):
786
# doesn't really need to be a method; could be static.
789
for line in diff.splitlines():
792
if line.startswith('+++ ') or line.startswith('--- '):
794
if line.startswith('@@ '):
796
if chunk is not None:
798
chunk = util.Container()
800
lines = [int(x.split(',')[0][1:]) for x in line.split(' ')[1:3]]
801
old_lineno = lines[0]
802
new_lineno = lines[1]
803
elif line.startswith(' '):
804
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=new_lineno,
805
type='context', line=util.fixed_width(line[1:])))
808
elif line.startswith('+'):
809
chunk.diff.append(util.Container(old_lineno=None, new_lineno=new_lineno,
810
type='insert', line=util.fixed_width(line[1:])))
812
elif line.startswith('-'):
813
chunk.diff.append(util.Container(old_lineno=old_lineno, new_lineno=None,
814
type='delete', line=util.fixed_width(line[1:])))
817
chunk.diff.append(util.Container(old_lineno=None, new_lineno=None,
818
type='unknown', line=util.fixed_width(repr(line))))
819
if chunk is not None:
823
def parse_delta(self, delta):
632
825
Return a nested data structure containing the changes in a delta::
641
text_changes: list((filename, file_id)),
643
repo = self._branch.repository
644
if bzrlib.revision.is_null(old_revid) or \
645
bzrlib.revision.is_null(new_revid):
646
old_tree, new_tree = map(
647
repo.revision_tree, [old_revid, new_revid])
649
old_tree, new_tree = repo.revision_trees([old_revid, new_revid])
651
reporter = FileChangeReporter(old_tree.inventory, new_tree.inventory)
653
bzrlib.delta.report_changes(new_tree.iter_changes(old_tree), reporter)
655
return util.Container(
656
added=sorted(reporter.added, key=lambda x:x.filename),
657
renamed=sorted(reporter.renamed, key=lambda x:x.new_filename),
658
removed=sorted(reporter.removed, key=lambda x:x.filename),
659
modified=sorted(reporter.modified, key=lambda x:x.filename),
660
text_changes=sorted(reporter.text_changes, key=lambda x:x.filename))
840
for path, fid, kind in delta.added:
841
added.append((rich_filename(path, kind), fid))
843
for path, fid, kind, text_modified, meta_modified in delta.modified:
844
modified.append(util.Container(filename=rich_filename(path, kind), file_id=fid))
846
for old_path, new_path, fid, kind, text_modified, meta_modified in delta.renamed:
847
renamed.append((rich_filename(old_path, kind), rich_filename(new_path, kind), fid))
848
if meta_modified or text_modified:
849
modified.append(util.Container(filename=rich_filename(new_path, kind), file_id=fid))
851
for path, fid, kind in delta.removed:
852
removed.append((rich_filename(path, kind), fid))
854
return util.Container(added=added, renamed=renamed, removed=removed, modified=modified)
857
def add_side_by_side(changes):
    """Attach side-by-side chunk views to each modified file in *changes*."""
    # FIXME: this is a rotten API.
    for change in changes:
        for modified_file in change.changes.modified:
            modified_file.sbs_chunks = _make_side_by_side(modified_file.chunks)
864
def get_filelist(self, inv, file_id, sort_type=None):
866
return the list of all files (and their attributes) within a given
870
dir_ie = inv[file_id]
871
path = inv.id2path(file_id)
876
for filename, entry in dir_ie.children.iteritems():
877
revid_set.add(entry.revision)
880
for change in self.get_changes(list(revid_set)):
881
change_dict[change.revid] = change
883
for filename, entry in dir_ie.children.iteritems():
885
if entry.kind == 'directory':
888
revid = entry.revision
890
file = util.Container(
891
filename=filename, executable=entry.executable, kind=entry.kind,
892
pathname=pathname, file_id=entry.file_id, size=entry.text_size,
893
revid=revid, change=change_dict[revid])
894
file_list.append(file)
896
if sort_type == 'filename' or sort_type is None:
897
file_list.sort(key=lambda x: x.filename)
898
elif sort_type == 'size':
899
file_list.sort(key=lambda x: x.size)
900
elif sort_type == 'date':
901
file_list.sort(key=lambda x: x.change.date)
904
for file in file_list:
911
_BADCHARS_RE = re.compile(ur'[\x00-\x08\x0b\x0e-\x1f]')
914
def annotate_file(self, file_id, revid):
919
file_revid = self.get_inventory(revid)[file_id].revision
922
# because we cache revision metadata ourselves, it's actually much
923
# faster to call 'annotate_iter' on the weave directly than it is to
924
# ask bzrlib to annotate for us.
925
w = self._branch.repository.weave_store.get_weave(file_id, self._branch.repository.get_transaction())
928
for line_revid, text in w.annotate_iter(file_revid):
929
revid_set.add(line_revid)
930
if self._BADCHARS_RE.match(text):
931
# bail out; this isn't displayable text
932
yield util.Container(parity=0, lineno=1, status='same',
933
text='(This is a binary file.)',
934
change=util.Container())
936
change_cache = dict([(c.revid, c) for c in self.get_changes(list(revid_set))])
938
last_line_revid = None
939
for line_revid, text in w.annotate_iter(file_revid):
940
if line_revid == last_line_revid:
941
# remember which lines have a new revno and which don't
946
last_line_revid = line_revid
947
change = change_cache[line_revid]
948
trunc_revno = change.revno
949
if len(trunc_revno) > 10:
950
trunc_revno = trunc_revno[:9] + '...'
952
yield util.Container(parity=parity, lineno=lineno, status=status,
953
change=change, text=util.fixed_width(text))
956
self.log.debug('annotate: %r secs' % (time.time() - z,))
959
def get_bundle(self, revid, compare_revid=None):
960
if compare_revid is None:
961
parents = self._revision_graph[revid]
963
compare_revid = parents[0]
967
bzrlib.bundle.serializer.write_bundle(self._branch.repository, revid, compare_revid, s)