2
# Copyright (C) 2006 Robey Pointer <robey@lag.net>
4
# This program is free software; you can redistribute it and/or modify
5
# it under the terms of the GNU General Public License as published by
6
# the Free Software Foundation; either version 2 of the License, or
7
# (at your option) any later version.
9
# This program is distributed in the hope that it will be useful,
10
# but WITHOUT ANY WARRANTY; without even the implied warranty of
11
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12
# GNU General Public License for more details.
14
# You should have received a copy of the GNU General Public License
15
# along with this program; if not, write to the Free Software
16
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
indexing of the comment text of revisions, for fast searching.
22
two separate database files are created:
24
- recorded: revid -> 1 (if the revid is indexed)
25
- index: 3-letter substring -> list(revids)
32
import os
import re
import time

from loggerhead import util
from loggerhead.lockfile import LockFile
from loggerhead.changecache import FakeShelf
36
# if any substring index reaches this many revids, replace the entry with
# an ALL marker -- it's not worth an explicit index.
# NOTE(review): the definitions of ALL_THRESHOLD and ALL were lost from this
# copy of the file but are referenced by _index_change/find below; values
# reconstructed -- confirm against a pristine copy.
ALL_THRESHOLD = 1000
ALL = 'ALL'

# decorator that serializes a method on the instance's '_lock' lockfile.
with_lock = util.with_lock('_lock')
45
def normalize_string(s):
    """
    remove any punctuation and normalize all whitespace to a single space.

    The result is a lowercase UTF-8 string suitable for 3-letter substring
    indexing.
    """
    s = util.to_utf8(s).lower()
    # remove apostrophes completely.
    s = re.sub(r"'", '', s)
    # convert other garbage into space
    s = re.sub(r'[^\w\d]', ' ', s)
    # compress multiple spaces into one.
    s = re.sub(r'\s{2,}', ' ', s)
    # and finally remove leading/trailing whitespace
    return s.strip()
61
class TextIndex (object):

    def __init__(self, history, cache_path):
        """
        Open (creating if necessary) the text index stored under
        C{cache_path} for the given history object.
        """
        self.history = history
        self.log = history.log

        if not os.path.exists(cache_path):
            os.mkdir(cache_path)

        self._recorded_filename = os.path.join(cache_path, 'textindex-recorded.sql')
        self._index_filename = os.path.join(cache_path, 'textindex.sql')

        # use a lockfile since the cache folder could be shared across different processes.
        self._lock = LockFile(os.path.join(cache_path, 'index-lock'))
        # NOTE(review): reconstructed -- check_rebuild() calls self.closed(),
        # which presumably reads this flag; confirm against a pristine copy.
        self._closed = False

        self.log.info('Using search index; %d entries.', len(self))
79
return FakeShelf(self._index_filename)
82
return FakeShelf(self._recorded_filename)
84
def _is_indexed(self, revid, recorded):
85
return recorded.get(util.to_utf8(revid)) is not None
88
def is_indexed(self, revid):
89
recorded = self._recorded()
91
return self._is_indexed(revid, recorded)
97
recorded = self._recorded()
99
return recorded.count()
117
recorded = self._recorded()
118
last_revid = util.to_utf8(self.history.last_revid)
120
return (recorded.count() >= len(self.history.get_revision_history())
121
and recorded.get(last_revid) is not None)
125
def _index_change(self, change, recorded, index):
127
currently, only indexes the 'comment' field.
129
comment = normalize_string(change.comment)
132
for i in xrange(len(comment) - 2):
133
sub = comment[i:i + 3]
134
orig = revid_set = index.get(sub)
135
if revid_set is None:
137
elif revid_set == ALL:
138
# this entry got too big
140
revid_set.add(change.revid)
141
if len(revid_set) > ALL_THRESHOLD:
144
index.update([(sub, revid_set)], commit=False)
146
index.add([(sub, revid_set)], commit=False)
148
recorded.add([(util.to_utf8(change.revid), True)], commit=False)
151
def index_changes(self, revid_list):
152
recorded = self._recorded()
153
index = self._index()
155
revid_list = [r for r in revid_list if not self._is_indexed(r, recorded)]
156
change_list = self.history.get_changes(revid_list)
157
for change in change_list:
158
self._index_change(change, recorded, index)
160
index.close(commit=True)
161
recorded.close(commit=True)
164
def find(self, text, revid_list=None):
165
index = self._index()
167
text = normalize_string(text)
172
if revid_list is not None:
173
total_set = set(revid_list)
176
for i in xrange(len(text) - 2):
178
revid_set = index.get(sub)
179
if revid_set is None:
180
# zero matches, stop here.
186
if total_set is None:
187
total_set = revid_set
189
total_set.intersection_update(revid_set)
190
if len(total_set) == 0:
195
# tricky: if seen_all is True, one of the substring indices was ALL
196
# (in other words, unindexed), so our results are actually a superset
197
# of the exact answer.
199
# if we cared, we could do a direct match on the result set and cull
200
# out any that aren't actually matches. for now, i'm gonna say that
201
# we DON'T care, and if one of the substrings hit ALL, there's a small
202
# chance that we'll give a few false positives.
205
def check_rebuild(self, max_time=3600):
207
check if there are any un-indexed revisions, and if so, index them.
208
but don't spend longer than C{max_time} on it.
210
if self.closed() or self.full():
214
self.log.info('Building search index...')
215
work = list(self.history.get_revision_history())
216
start_time = time.time()
217
last_update = time.time()
221
for i in xrange(0, len(work), jump):
223
self.index_changes(r)
229
if now - start_time > 3600:
230
# there's no point working for hours. eventually we might even
231
# hit the next re-index interval, which would suck mightily.
232
self.log.info('Search indexing has worked for an hour; giving up for now.')
234
if now - last_update > 60:
235
self.log.info('Search indexing continues: %d/%d' % (min(count, len(work)), len(work)))
236
last_update = time.time()
237
# give someone else a chance at the lock
239
self.log.info('Search index completed.')