 # We take an optimistic approach to concurrency here: we might do work twice
 # in the case of races, but not crash or corrupt data.

+def safe_init_db(filename, init_sql):
+    # To avoid races around creating the database, we create the db in
+    # a temporary file and rename it into the ultimate location.
+    fd, temp_path = tempfile.mkstemp(dir=os.path.dirname(filename))
+    con = dbapi2.connect(temp_path)
+    cur = con.cursor()
+    cur.execute(init_sql)
+    con.commit()
+    con.close()
+    os.rename(temp_path, filename)
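What makes this safe is that mkstemp gives every initialiser its own private file and os.rename publishes it in a single step (atomic on POSIX, replacing any existing target), so readers only ever see a missing database or a complete one. The same pattern works for any file that must never be observed half-written; the sketch below is illustrative only, and the names write_atomically and payload are not from this module:

import os
import tempfile

def write_atomically(path, payload):
    # Build the content in a private temporary file in the target directory...
    fd, temp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        os.write(fd, payload)
    finally:
        os.close(fd)
    # ...then publish it with one rename: other processes see either the old
    # file or the complete new one, never a partial write.
    os.rename(temp_path, path)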
 class FakeShelf(object):

     def __init__(self, filename):
         create_table = not os.path.exists(filename)
         if create_table:
-            # To avoid races around creating the database, we create the db in
-            # a temporary file and rename it into the ultimate location.
-            fd, path = tempfile.mkstemp(dir=os.path.dirname(filename))
-            self._create_table(path)
-            os.rename(path, filename)
+            safe_init_db(
+                filename, "create table RevisionData "
+                "(revid binary primary key, data binary)")
         self.connection = dbapi2.connect(filename)
         self.cursor = self.connection.cursor()
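This is where the "work twice, but never corrupt" promise from the top of the module pays off: two threads that construct FakeShelf for the same missing file may both run safe_init_db, but each builds the schema in its own temporary file, so whichever rename lands last still leaves a complete database behind. A rough, illustrative demo (the path is made up and its directory is assumed to exist; FakeShelf is the class above):

import threading

def open_shelf(path, counts, idx):
    # Either racer may find the file missing and initialise it; neither can
    # observe a half-created database.
    shelf = FakeShelf(path)
    counts[idx] = shelf.cursor.execute(
        "select count(*) from RevisionData").fetchone()[0]

counts = [None, None]
threads = [threading.Thread(target=open_shelf,
                            args=('./caches/filechanges.sql', counts, i))
           for i in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# Both threads end up with a usable connection to the RevisionData table.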
 class FileChangeCache(object):

-    def __init__(self, history, cache_path):
-        self.history = history
+    def __init__(self, cache_path):
         if not os.path.exists(cache_path):
             os.mkdir(cache_path)

         changes = self.history.get_file_changes_uncached(entry)
         cache.add(entry.revid, changes)
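The two context lines above are the cache-miss half of the lookup and sit further down, inside get_file_changes; the lines around them are not part of this excerpt. A hedged sketch of the read-through shape they imply (cache_file_name and FakeShelf's get and add methods are assumptions inferred from the surrounding code, not quoted from the patch):

def get_file_changes(self, entry):
    # Read-through cache: consult the sqlite-backed shelf first, fall back to
    # the expensive computation and remember the result for next time.
    cache = FakeShelf(self.cache_file_name)
    changes = cache.get(entry.revid)
    if changes is None:
        changes = self.history.get_file_changes_uncached(entry)
        cache.add(entry.revid, changes)
    return changes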
+class RevInfoDiskCache(object):
+    """Like `RevInfoMemoryCache` but backed in a sqlite DB."""
+
+    def __init__(self, cache_path):
+        if not os.path.exists(cache_path):
+            os.mkdir(cache_path)
+        filename = os.path.join(cache_path, 'revinfo.sql')
+        create_table = not os.path.exists(filename)
+        if create_table:
+            safe_init_db(
+                filename, "create table Data "
+                "(key binary primary key, revid binary, data binary)")
+        self.connection = dbapi2.connect(filename)
+        self.cursor = self.connection.cursor()
+
+    def get(self, key, revid):
+        self.cursor.execute(
+            "select revid, data from data where key = ?", (dbapi2.Binary(key),))
+        row = self.cursor.fetchone()
+        if row is None:
+            return None
+        elif str(row[0]) != revid:
+            return None
+        else:
+            return marshal.loads(zlib.decompress(row[1]))
+
+    def set(self, key, revid, data):
+        try:
+            self.cursor.execute(
+                'delete from data where key = ?', (dbapi2.Binary(key), ))
+            blob = zlib.compress(marshal.dumps(data))
+            self.cursor.execute(
+                "insert into data (key, revid, data) values (?, ?, ?)",
+                map(dbapi2.Binary, [key, revid, blob]))
+            self.connection.commit()
+        except dbapi2.IntegrityError:
+            # If another thread or process attempted to set the same key, we
+            # don't care too much -- it's only a cache after all!
+            pass
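Taken together, set and get behave like a dictionary validated by revid: the key finds the row, but the stored revid must also match or the entry is treated as stale. Values are marshalled and zlib-compressed, so anything marshal can serialise round-trips. An illustrative session (the key, revid and data values are made up):

cache = RevInfoDiskCache('./caches')
cache.set('branch-tip', 'revid-1', ['cached', 'revision', 'info'])
cache.get('branch-tip', 'revid-1')    # -> ['cached', 'revision', 'info']
cache.get('branch-tip', 'revid-2')    # -> None: same key, stale revid
cache.get('other-key', 'revid-1')     # -> None: no row for this key

The delete-then-insert in set, with IntegrityError swallowed, means concurrent writers interleave harmlessly; at worst one writer's row survives, which is acceptable for a cache.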