25
25
cached a change, it's good forever.
31
34
from loggerhead import util
35
from loggerhead.util import decorator
32
36
from loggerhead.lockfile import LockFile
34
39
# Method decorator: serialise access through the object's ``_lock``
# attribute (a LockFile) while the cache is in use.
with_lock = util.with_lock('_lock', 'ChangeCache')

# Which python sqlite binding to use; the stdlib 'sqlite3' is the default.
SQLITE_INTERFACE = os.environ.get('SQLITE_INTERFACE', 'sqlite3')

if SQLITE_INTERFACE == 'sqlite3':
    from sqlite3 import dbapi2
    # sqlite3 uses '?' positional parameter markers.
    # NOTE(review): the assignment was lost in the garbled source; other
    # bindings may define _param_marker in a branch above this window —
    # re-binding '?' here is harmless for the sqlite3 case. TODO confirm.
    _param_marker = '?'

# Statements for the revid -> pickled-data table, rewritten with the
# binding's parameter marker.
_select_stmt = ("select data from revisiondata where revid = ?"
                ).replace('?', _param_marker)
_insert_stmt = ("insert into revisiondata (revid, data) "
                "values (?, ?)").replace('?', _param_marker)
50
class FakeShelf(object):
    """Minimal shelve-like store backed by a sqlite database.

    Maps revision ids to pickled objects in a single ``revisiondata``
    table.  Only the operations the caches need are provided: single-key
    ``get`` and bulk ``add``.
    """

    def __init__(self, filename):
        # Create the schema only the first time this cache file is used.
        create_table = not os.path.exists(filename)
        self.connection = dbapi2.connect(filename)
        self.cursor = self.connection.cursor()
        if create_table:
            self._create_table()

    def _create_table(self):
        self.cursor.execute(
            "create table RevisionData "
            "(revid binary primary key, data binary)")
        self.connection.commit()

    def _serialize(self, obj):
        # protocol=2 is the most compact pickle protocol on python 2.
        return dbapi2.Binary(cPickle.dumps(obj, protocol=2))

    def _unserialize(self, data):
        return cPickle.loads(str(data))

    def get(self, revid):
        """Return the cached object for ``revid``, or None on a miss."""
        self.cursor.execute(_select_stmt, (revid,))
        filechange = self.cursor.fetchone()
        if filechange is None:
            return None
        return self._unserialize(filechange[0])

    def add(self, revid_obj_pairs):
        """Insert ``(revid, obj)`` pairs in one transaction."""
        for (r, d) in revid_obj_pairs:
            self.cursor.execute(_insert_stmt, (r, self._serialize(d)))
        self.connection.commit()
80
class FileChangeCache(object):
    """Cache of per-revision file-change lists, stored via FakeShelf.

    File changes are expensive to compute but immutable per revision, so
    once cached they are good forever.
    """

    def __init__(self, history, cache_path):
        self.history = history
        self.log = history.log

        if not os.path.exists(cache_path):
            os.mkdir(cache_path)

        self._changes_filename = os.path.join(cache_path, 'filechanges.sql')

        # use a lockfile since the cache folder could be shared across
        # different processes.
        self._lock = LockFile(os.path.join(cache_path, 'filechange-lock'))

    @with_lock
    def get_file_changes(self, entries):
        """Return the file-change data for each entry, in order.

        Cache hits are answered from the sqlite store; misses are
        computed in one batch via the history object, spliced back into
        their original positions, and written to the cache.
        """
        out = []
        missing_entries = []
        missing_entry_indices = []
        cache = FakeShelf(self._changes_filename)
        for entry in entries:
            changes = cache.get(entry.revid)
            if changes is not None:
                out.append(changes)
            else:
                missing_entries.append(entry)
                # remember where the computed result must be spliced in
                missing_entry_indices.append(len(out))
                out.append(None)
        if missing_entries:
            missing_changes = self.history.get_file_changes_uncached(
                missing_entries)
            revid_changes_pairs = []
            for i, entry, changes in zip(
                    missing_entry_indices, missing_entries, missing_changes):
                out[i] = changes
                revid_changes_pairs.append((entry.revid, changes))
            cache.add(revid_changes_pairs)
        return out
51
# keep a separate cache for the diffs, because they're very time-consuming to fetch.
52
self._changes_filename = os.path.join(cache_path, 'changes')
53
self._changes_diffs_filename = os.path.join(cache_path, 'changes-diffs')
55
# use a lockfile since the cache folder could be shared across different processes.
56
self._lock = LockFile(os.path.join(cache_path, 'lock'))
59
# this is fluff; don't slow down startup time with it.
62
self.log.info('Using change cache %s; %d/%d entries.' % (cache_path, s1, s2))
63
threading.Thread(target=log_sizes).start()
67
self.log.debug('Closing cache file.')
79
def get_changes(self, revid_list, get_diffs=False):
81
get a list of changes by their revision_ids. any changes missing
82
from the cache are fetched by calling L{History.get_change_uncached}
83
and inserted into the cache before returning.
86
cache = shelve.open(self._changes_diffs_filename, 'c', protocol=2)
88
cache = shelve.open(self._changes_filename, 'c', protocol=2)
94
for revid in revid_list:
95
# if the revid is in unicode, use the utf-8 encoding as the key
96
srevid = util.to_utf8(revid)
99
out.append(cache[srevid])
101
#self.log.debug('Entry cache miss: %r' % (revid,))
103
fetch_list.append(revid)
104
sfetch_list.append(srevid)
106
if len(fetch_list) > 0:
107
# some revisions weren't in the cache; fetch them
108
changes = self.history.get_changes_uncached(fetch_list, get_diffs)
111
for i in xrange(len(revid_list)):
113
cache[sfetch_list.pop(0)] = out[i] = changes.pop(0)
119
def full(self, get_diffs=False):
121
cache = shelve.open(self._changes_diffs_filename, 'c', protocol=2)
123
cache = shelve.open(self._changes_filename, 'c', protocol=2)
125
return (len(cache) >= len(self.history.get_revision_history())) and (util.to_utf8(self.history.last_revid) in cache)
131
cache = shelve.open(self._changes_filename, 'c', protocol=2)
134
cache = shelve.open(self._changes_diffs_filename, 'c', protocol=2)
139
def check_rebuild(self, max_time=3600):
141
check if we need to fill in any missing pieces of the cache. pull in
142
any missing changes, but don't work any longer than C{max_time}
145
if self.closed() or self.full():
148
self.log.info('Building revision cache...')
149
start_time = time.time()
150
last_update = time.time()
153
work = list(self.history.get_revision_history())
155
for i in xrange(0, len(work), jump):
157
# must call into history so we grab the branch lock (otherwise, lock inversion)
158
self.history.get_changes(r)
164
if now - start_time > max_time:
165
self.log.info('Cache rebuilding will pause for now.')
168
if now - last_update > 60:
169
self.log.info('Revision cache rebuilding continues: %d/%d' % (min(count, len(work)), len(work)))
170
last_update = time.time()
172
# give someone else a chance at the lock
174
self.log.info('Revision cache rebuild completed.')