Skip to content

Commit

Permalink
Merge pull request #86 from evo-company/refactor-cache-settings
Browse files Browse the repository at this point in the history
refactor cache settings
  • Loading branch information
kindermax committed Oct 4, 2022
2 parents 73f2c85 + 0d08402 commit 123d97f
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 18 deletions.
13 changes: 6 additions & 7 deletions hiku/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
defaultdict,
deque,
)
from dataclasses import dataclass
from typing import (
TYPE_CHECKING,
Any,
Expand Down Expand Up @@ -56,17 +57,15 @@ def set_many(self, items: Dict[str, Any], ttl: int) -> None:
raise NotImplementedError()


@dataclass
class CacheSettings:
    """User-facing cache configuration for the query engine.

    Replaces the hand-written ``__init__`` with a dataclass: same
    positional/keyword call signature (``CacheSettings(cache, cache_key)``),
    same attribute names, plus generated ``__repr__`` and ``__eq__`` for free.

    Attributes:
        cache: backend implementing the ``BaseCache`` get/set interface.
        cache_key: optional callable used to customize the cache key hash;
            when ``None`` the default key derivation is used.
    """
    cache: BaseCache
    cache_key: Optional[CacheKeyFn] = None


class CacheInfo:
__slots__ = ('cache', 'cache_key')

def __init__(
self,
cache_settings: CacheSettings
Expand Down
7 changes: 4 additions & 3 deletions hiku/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -775,10 +775,10 @@ class Engine:
def __init__(
    self,
    executor: SyncAsyncExecutor,
    cache: Optional[CacheSettings] = None,
) -> None:
    """Initialize the engine.

    Args:
        executor: sync or async executor used to drive query tasks.
        cache: optional cache configuration; when ``None`` caching is
            disabled. Only the settings are stored here — the per-query
            ``CacheInfo`` is constructed lazily in ``execute`` so each
            execution gets a fresh cache context.
    """
    self.executor = executor
    self.cache_settings = cache

def execute(
self,
Expand All @@ -791,8 +791,9 @@ def execute(
query = InitOptions(graph).visit(query)
queue = Queue(self.executor)
task_set = queue.fork(None)
cache = CacheInfo(self.cache_settings) if self.cache_settings else None
query_workflow = Query(
queue, task_set, graph, query, Context(ctx), self.cache
queue, task_set, graph, query, Context(ctx), cache
)
query_workflow.start()
return self.executor.process(queue, query_workflow)
19 changes: 11 additions & 8 deletions tests/test_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
from hiku.cache import (
BaseCache,
CacheSettings,
CacheInfo,
)


Expand Down Expand Up @@ -499,7 +500,9 @@ def test_cached_link_one__sqlalchemy(sync_graph_sqlalchemy):

cache = InMemoryCache()
cache = Mock(wraps=cache)
engine = Engine(ThreadsExecutor(thread_pool), CacheSettings(cache))
cache_settings = CacheSettings(cache)
cache_info = CacheInfo(cache_settings)
engine = Engine(ThreadsExecutor(thread_pool), cache_settings)
ctx = {
SA_ENGINE_KEY: sa_engine,
'locale': 'en'
Expand All @@ -520,8 +523,8 @@ def execute(q):
.node.fields_map['photo']
)

company_key = engine.cache.query_hash(ctx, company_link, 10)
attributes_key = engine.cache.query_hash(ctx, attributes_link, [11, 12])
company_key = cache_info.query_hash(ctx, company_link, 10)
attributes_key = cache_info.query_hash(ctx, attributes_link, [11, 12])

company_cache = {
'User': {
Expand Down Expand Up @@ -627,6 +630,7 @@ def cache_key(ctx, hasher):
hasher.update(ctx['locale'].encode('utf-8'))

cache_settings = CacheSettings(cache, cache_key)
cache_info = CacheInfo(cache_settings)
engine = Engine(ThreadsExecutor(thread_pool), cache_settings)
ctx = {
SA_ENGINE_KEY: sa_engine,
Expand All @@ -649,11 +653,10 @@ def execute(q):
.node.fields_map['photo']
)

company10_key = engine.cache.query_hash(ctx, company_link, 10)
company20_key = engine.cache.query_hash(ctx, company_link, 20)
attributes11_12_key = engine.cache.query_hash(
ctx, attributes_link, [11, 12])
attributes_none_key = engine.cache.query_hash(ctx, attributes_link, [])
company10_key = cache_info.query_hash(ctx, company_link, 10)
company20_key = cache_info.query_hash(ctx, company_link, 20)
attributes11_12_key = cache_info.query_hash(ctx, attributes_link, [11, 12])
attributes_none_key = cache_info.query_hash(ctx, attributes_link, [])

company10_cache = {
'User': {
Expand Down

0 comments on commit 123d97f

Please sign in to comment.