server/search: cache by query rather than its text

This commit is contained in:
rr- 2016-06-03 13:50:38 +02:00
parent 59ad5fe402
commit f0d3589344
5 changed files with 116 additions and 3 deletions

View file

@@ -11,12 +11,21 @@ class RangedCriterion(_BaseCriterion):
self.min_value = min_value
self.max_value = max_value
def __hash__(self):
    # The 'range' tag keeps a ranged criterion's hash distinct from other
    # criterion kinds that might hash the same underlying values.
    identity = ('range', self.min_value, self.max_value)
    return hash(identity)
class PlainCriterion(_BaseCriterion):
    """Criterion that matches a single literal value (e.g. ``safety:safe``)."""

    def __init__(self, original_text, value):
        super().__init__(original_text)
        self.value = value  # parsed value; original_text is stored by the base class

    def __hash__(self):
        # Tag the hash with the criterion kind, mirroring RangedCriterion
        # (('range', ...)) and ArrayCriterion (('array', ...)), so criteria
        # of different kinds cannot accidentally share a hash. These hashes
        # feed search-cache keys, where cross-kind collisions would cause
        # spurious cache hits.
        return hash(('plain', self.value))
class ArrayCriterion(_BaseCriterion):
    """Criterion that carries a list of alternative values."""

    def __init__(self, original_text, values):
        super().__init__(original_text)
        self.values = values  # list of parsed values

    def __hash__(self):
        # The 'array' tag prevents hash clashes with other criterion kinds.
        return hash(('array', *self.values))

View file

@@ -32,12 +32,13 @@ class Executor(object):
Parse input and return tuple containing total record count and filtered
entities.
'''
key = (id(self.config), query_text, page, page_size)
if cache.has(key):
return cache.get(key)
search_query = self.parser.parse(query_text)
key = (id(self.config), hash(search_query), page, page_size)
if cache.has(key):
return cache.get(key)
filter_query = self.config.create_filter_query()
filter_query = filter_query.options(sqlalchemy.orm.lazyload('*'))
filter_query = self._prepare_db_query(filter_query, search_query, True)

View file

@@ -64,6 +64,13 @@ class SearchQuery():
self.special_tokens = []
self.sort_tokens = []
def __hash__(self):
    # Token order within each group is significant; the four groups are
    # hashed as tuples in a fixed order so equivalent queries hash alike.
    token_groups = (
        self.anonymous_tokens,
        self.named_tokens,
        self.special_tokens,
        self.sort_tokens,
    )
    return hash(tuple(tuple(group) for group in token_groups))
class Parser(object):
def parse(self, query_text):
query = SearchQuery()

View file

@@ -3,11 +3,17 @@ class AnonymousToken(object):
self.criterion = criterion
self.negated = negated
def __hash__(self):
    # Negation is part of the token's identity: "foo" and "-foo" must
    # hash differently.
    identity = (self.criterion, self.negated)
    return hash(identity)
class NamedToken(AnonymousToken):
    """Token of the ``name:criterion`` form; extends the anonymous token
    with the field name."""

    def __init__(self, name, criterion, negated):
        super().__init__(criterion, negated)
        self.name = name  # the field the criterion applies to

    def __hash__(self):
        # Include the name so e.g. safety:safe and tag:safe hash apart.
        identity = (self.name, self.criterion, self.negated)
        return hash(identity)
class SortToken(object):
SORT_DESC = 'desc'
SORT_ASC = 'asc'
@@ -18,7 +24,13 @@ class SortToken(object):
self.name = name
self.direction = direction
def __hash__(self):
    # Sort field and direction together identify the token.
    identity = (self.name, self.direction)
    return hash(identity)
class SpecialToken(object):
    """Search token carrying a single special value plus a negation flag."""

    def __init__(self, value, negated):
        self.value = value      # the raw special value
        self.negated = negated  # whether the token is negated

    def __hash__(self):
        # Value and negation state together form the token's identity.
        identity = (self.value, self.negated)
        return hash(identity)

View file

@@ -0,0 +1,84 @@
import unittest.mock
from szurubooru import search
from szurubooru.func import cache
def test_retrieving_from_cache(user_factory):
    """When the cache reports a hit, execute() must fetch the stored result
    instead of running the search."""
    config = unittest.mock.MagicMock()
    with unittest.mock.patch('szurubooru.func.cache.has'):
        with unittest.mock.patch('szurubooru.func.cache.get'):
            # Pretend every key is already cached.
            cache.has.side_effect = lambda *args: True
            search.Executor(config).execute('test:whatever', 1, 10)
            assert cache.get.called
def test_putting_equivalent_queries_into_cache(user_factory):
    """Queries that differ only in surrounding whitespace, token order or
    token-name case must all map to one cache key."""
    config = search.configs.PostSearchConfig()
    with unittest.mock.patch('szurubooru.func.cache.has'), \
            unittest.mock.patch('szurubooru.func.cache.put'):
        recorded_keys = []
        def record_key(key, value):
            recorded_keys.append(key)
        cache.has.side_effect = lambda *args: False
        cache.put.side_effect = record_key
        executor = search.Executor(config)
        equivalent_queries = [
            'safety:safe test',
            'safety:safe test',
            'safety:safe test ',
            ' safety:safe test',
            ' SAFETY:safe test',
            'test safety:safe',
        ]
        for query_text in equivalent_queries:
            executor.execute(query_text, 1, 10)
        # Every execution stored something, but always under the same key.
        assert len(recorded_keys) == 6
        assert len(set(recorded_keys)) == 1
def test_putting_non_equivalent_queries_into_cache(user_factory):
    """Semantically different queries -- or identical queries with different
    pagination -- must each get a distinct cache key."""
    config = search.configs.PostSearchConfig()
    with unittest.mock.patch('szurubooru.func.cache.has'), \
            unittest.mock.patch('szurubooru.func.cache.put'):
        recorded_keys = []
        def record_key(key, value):
            recorded_keys.append(key)
        cache.has.side_effect = lambda *args: False
        cache.put.side_effect = record_key
        executor = search.Executor(config)
        args = [
            ('', 1, 10),
            ('creation-time:2016', 1, 10),
            ('creation-time:2015', 1, 10),
            ('creation-time:2016-01', 1, 10),
            ('creation-time:2016-02', 1, 10),
            ('creation-time:2016-01-01', 1, 10),
            ('creation-time:2016-01-02', 1, 10),
            ('tag-count:1,3', 1, 10),
            ('tag-count:1,2', 1, 10),
            ('tag-count:1', 1, 10),
            ('tag-count:1..3', 1, 10),
            ('tag-count:1..4', 1, 10),
            ('tag-count:2..3', 1, 10),
            ('tag-count:1..', 1, 10),
            ('tag-count:2..', 1, 10),
            ('tag-count:..3', 1, 10),
            ('tag-count:..4', 1, 10),
            ('-tag-count:1..3', 1, 10),
            ('-tag-count:1..4', 1, 10),
            ('-tag-count:2..3', 1, 10),
            ('-tag-count:1..', 1, 10),
            ('-tag-count:2..', 1, 10),
            ('-tag-count:..3', 1, 10),
            ('-tag-count:..4', 1, 10),
            ('safety:safe', 1, 10),
            ('safety:safe', 1, 20),
            ('safety:safe', 2, 10),
            ('safety:sketchy', 1, 10),
            ('safety:safe test', 1, 10),
            ('-safety:safe', 1, 10),
            ('-safety:safe', 1, 20),
            ('-safety:safe', 2, 10),
            ('-safety:sketchy', 1, 10),
            ('-safety:safe test', 1, 10),
            ('safety:safe -test', 1, 10),
            ('-test', 1, 10),
        ]
        for query_text, page, page_size in args:
            executor.execute(query_text, page, page_size)
        # One cache write per query, and no two writes shared a key.
        assert len(recorded_keys) == len(args)
        assert len(set(recorded_keys)) == len(args)