
Commit

joocer committed May 18, 2024
1 parent 228de42 · commit d49c16c
Showing 5 changed files with 18 additions and 9 deletions.
8 changes: 4 additions & 4 deletions opteryx/config.py
@@ -19,8 +19,8 @@

_config_values: dict = {}


OPTERYX_DEBUG = environ.get("OPTERYX_DEBUG") is not None
# we need a preliminary version of this variable
_OPTERYX_DEBUG = environ.get("OPTERYX_DEBUG") is not None


def memory_allocation_calculation(allocation) -> int:
@@ -109,10 +109,10 @@ def line_value(value):
if _config_path.exists():
with open(_config_path, "r") as _config_file:
_config_values = parse_yaml(_config_file.read())
if OPTERYX_DEBUG:
if _OPTERYX_DEBUG:
print(f"{datetime.datetime.now()} [LOADER] Loading config from {_config_path}")
except Exception as exception: # pragma: no cover # it doesn't matter why - just use the defaults
if OPTERYX_DEBUG:
if _OPTERYX_DEBUG:
print(
f"{datetime.datetime.now()} [LOADER] Config file {_config_path} not used - {exception}"
)
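The config.py change renames the module-level debug flag to _OPTERYX_DEBUG, marking it as a preliminary value read straight from the environment and used only to gate diagnostics while the YAML config file is being loaded. Below is a minimal sketch of that pattern; the file path, the stubbed parser, and the final derived flag are illustrative assumptions, not Opteryx's actual code.

# Sketch: read a debug flag from the environment before any config exists,
# then use it to gate diagnostic output while the config file is loaded.
import datetime
from os import environ
from pathlib import Path

_DEBUG = environ.get("OPTERYX_DEBUG") is not None   # preliminary flag
_config_values: dict = {}
_config_path = Path("config.yaml")                  # hypothetical location

try:
    if _config_path.exists():
        with open(_config_path, "r") as _config_file:
            _config_values = {}  # a YAML parser would be called on the file contents here
        if _DEBUG:
            print(f"{datetime.datetime.now()} [LOADER] Loading config from {_config_path}")
except Exception as exception:  # fall back to defaults, optionally saying why
    if _DEBUG:
        print(f"{datetime.datetime.now()} [LOADER] Config file {_config_path} not used - {exception}")

# presumably the public flag is derived later from the environment plus the loaded config
OPTERYX_DEBUG = _DEBUG or bool(_config_values.get("OPTERYX_DEBUG", False))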
7 changes: 7 additions & 0 deletions opteryx/managers/cache/memcached.py
@@ -90,6 +90,7 @@ def __init__(self, **kwargs):
self.misses: int = 0
self.skips: int = 0
self.errors: int = 0
self.sets: int = 0

def get(self, key: bytes) -> Union[bytes, None]:
if self._consecutive_failures >= MAXIMUM_CONSECUTIVE_FAILURES:
@@ -119,6 +120,12 @@ def set(self, key: bytes, value: bytes) -> None:
if self._consecutive_failures < MAXIMUM_CONSECUTIVE_FAILURES:
try:
self._server.set(key, value)
self.sets += 1
except:
# if we fail to set, stop trying
self._consecutive_failures = MAXIMUM_CONSECUTIVE_FAILURES
self.errors += 1

def __del__(self):
pass
# DEBUG: log(f"Memcached <hits={self.hits} misses={self.misses} sets={self.sets} skips={self.skips} errors={self.errors}>")
4 changes: 2 additions & 2 deletions opteryx/operators/base_plan_node.py
@@ -63,9 +63,9 @@ def __init__(self, properties: QueryProperties, **parameters):
self.statistics = QueryStatistics(properties.qid)
self.execution_time = 0
self.identity = random_string()
self.do = None
self.do: Optional[BasePlanDataObject] = None

def to_json(self) -> dict: # pragma: no cover
def to_json(self) -> bytes: # pragma: no cover

import orjson

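The base_plan_node.py change tightens two annotations: the data object becomes Optional[BasePlanDataObject], and to_json is declared to return bytes because orjson.dumps() produces UTF-8 encoded bytes rather than a str or a dict. A standalone illustration of the second point follows; the dataclass and function names are stand-ins, not Opteryx's classes.

from dataclasses import asdict, dataclass
from typing import Optional

import orjson


@dataclass
class PlanDataObject:                 # stand-in for BasePlanDataObject
    operation: Optional[str] = None


def to_json(do: Optional[PlanDataObject]) -> bytes:
    payload = asdict(do) if do is not None else {}
    return orjson.dumps(payload)      # orjson returns bytes, not str


print(to_json(PlanDataObject()))      # b'{"operation":null}'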
5 changes: 3 additions & 2 deletions opteryx/operators/heap_sort_node.py
@@ -25,8 +25,9 @@
"""
import time
from dataclasses import dataclass
from typing import Iterable
from typing import Generator

import pyarrow
from pyarrow import concat_tables

from opteryx.exceptions import ColumnNotFoundError
Expand Down Expand Up @@ -67,7 +68,7 @@ def config(self): # pragma: no cover
def name(self): # pragma: no cover
return "Heap Sort"

def execute(self) -> Iterable:
def execute(self) -> Generator[pyarrow.Table, None, None]: # pragma: no cover
table = None
morsels = self._producers[0] # type:ignore

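The heap_sort_node.py change replaces the loose Iterable annotation with Generator[pyarrow.Table, None, None], making explicit that execute yields Arrow tables. The sketch below shows a generator with that shape doing a top-N sort over a stream of morsels, which is the kind of work a heap-sort node performs; it is an illustrative stand-in, not the Opteryx operator, and the column/limit parameters are assumed.

from typing import Generator, Iterable

import pyarrow
from pyarrow import concat_tables


def heap_sort(
    morsels: Iterable[pyarrow.Table],
    column: str,
    limit: int,
) -> Generator[pyarrow.Table, None, None]:
    table = None
    for morsel in morsels:
        table = morsel if table is None else concat_tables([table, morsel])
        # sort the accumulated rows and keep only the first `limit`,
        # so memory stays bounded by roughly `limit` rows plus one morsel
        table = table.sort_by([(column, "ascending")]).slice(0, limit)
    if table is not None:
        yield table


# usage
chunks = [pyarrow.table({"n": [5, 1, 9]}), pyarrow.table({"n": [2, 7]})]
for result in heap_sort(chunks, column="n", limit=3):
    print(result.column("n").to_pylist())   # [1, 2, 5]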
3 changes: 2 additions & 1 deletion tests/storage/test_cache_memcached.py
@@ -23,6 +23,7 @@ def test_memcached_cache():
from opteryx.managers.cache import MemcachedCache

cache = MemcachedCache()
cache._server.flush_all()
opteryx.set_cache_manager(CacheManager(cache_backend=cache))

# read the data five times, this should populate the cache if it hasn't already
@@ -44,7 +45,7 @@ def test_memcached_cache():
), f"hits: {cache.hits}, misses: {cache.misses}, skips: {cache.skips}, errors: {cache.errors}"

assert stats["remote_cache_hits"] >= stats["blobs_read"], stats
# assert stats.get("cache_misses", 0) == 0, stats
assert stats.get("cache_misses", 0) == 0, stats


if __name__ == "__main__": # pragma: no cover
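The test change flushes the backing memcached instance before running anything, so the cache starts empty and the hit/miss/set counters are deterministic; that is what allows the previously commented-out cache_misses assertion to be re-enabled. A minimal sketch of the flush-then-exercise idea, using only methods visible in the diffs above and assuming a reachable memcached server behind a pymemcache-style _server client:

from opteryx.managers.cache import MemcachedCache

cache = MemcachedCache()
cache._server.flush_all()                     # start from a known-empty cache

assert cache.get(b"example-key") is None      # empty cache, so nothing comes back
cache.set(b"example-key", b"example-value")   # increments the new `sets` counter
assert cache.get(b"example-key") == b"example-value"   # now served from memcached

# with a flushed cache the counters are predictable (exact attribution of
# hits vs misses is assumed from the counters shown in the diff above)
assert cache.sets == 1 and cache.hits >= 1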
