Skip to content

Commit

Permalink
Added support for dry run in test_all_models function and updated token data for BSC and Polygon networks (#230)
Browse files Browse the repository at this point in the history

* Added support for dry run in test_all_models function and updated token data for BSC and Polygon networks.

* simplify cache

* cache update 2

* fix: print as double quote

* update adt for some

* update history
  • Loading branch information
leafyoung committed May 31, 2023
1 parent 82de6b8 commit 4137c28
Show file tree
Hide file tree
Showing 8 changed files with 150 additions and 85 deletions.
5 changes: 5 additions & 0 deletions HISTORY.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
History
-------
0.8.113 [2023-05-29]
* feat: support for dry run and test model selection in test_all_models function
* feat: updated token data for BSC and Polygon networks
* chore: clean up cache object

0.8.112 [2023-05-26]
* feat: enable ledger for BSC

Expand Down
48 changes: 44 additions & 4 deletions credmark/cmf/credmark_dev.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,7 @@ def main(): # pylint: disable=too-many-statements
help='comma-separated list of prefixes to match slug to run')
parser_test_all.add_argument('-s', '--skip-test', required=False, default=None,
help='comma-separated list of prefixes to match slug to skip')
parser_test_all.add_argument('-r', '--dry-run', action='store_true', help='Dry run')
add_run_arg(parser_test_all)
add_api_url_arg(parser_test_all)
parser_test_all.add_argument('--run_id', help=argparse.SUPPRESS, required=False, default=None)
Expand Down Expand Up @@ -669,6 +670,7 @@ def test_all_models(args):
model_prefix = None if get_prefix is None else get_prefix.split(',')
get_skip_test = args.get('skip_test')
model_prefix_skip = None if get_skip_test is None else get_skip_test.split(',')
get_dry_run = args.get('dry_run')

input_chain_id = args.get('chain_id')
input_block_number = args.get('block_number')
Expand All @@ -692,13 +694,45 @@ def test_all_models(args):
model_args['model-slug'] = v
has_slug = True
if model_prefix is not None:
false_by_other = False
for pre in model_prefix:
if v.startswith(pre):
if pre.endswith('+'):
pre = pre[:-2] + chr(ord(pre[-2]) + 1)
if v >= pre and not false_by_other:
skip_test = False
else:
skip_test = True
false_by_other = True
elif pre.endswith('-'):
pre = pre[:-2] + chr(ord(pre[-2]) - 1)
if v <= pre and not false_by_other:
skip_test = False
else:
skip_test = True
false_by_other = True
elif v.startswith(pre):
skip_test = False
break
# print(f'Running {v} with {pre}')

if model_prefix_skip is not None:
for pre in model_prefix_skip:
if v.startswith(pre):
true_by_other = False
if pre.endswith('+'):
pre = pre[:-2] + chr(ord(pre[-2]) + 1)
if v >= pre and not true_by_other:
skip_test = True
else:
skip_test = False
true_by_other = True
elif pre.endswith('-'):
pre = pre[:-2] + chr(ord(pre[-2]) - 1)
if v <= pre and not true_by_other:
skip_test = True
else:
skip_test = False
true_by_other = True
elif v.startswith(pre):
skip_test = True
break
if has_input:
Expand Down Expand Up @@ -737,6 +771,10 @@ def test_all_models(args):
if skip_test:
continue

if get_dry_run:
print(model_args['model-slug'])
continue

_m_end_time = datetime.now()

if get_manifests:
Expand All @@ -761,8 +799,10 @@ def test_all_models(args):
}

if format_json:
print(json_dumps(model_run_args, indent=4).replace(
'\\n', '\n').replace('\\"', '\''))
print(json_dumps(model_run_args, indent=4)
.replace('\\"', '__DQ__')
.replace('"', "'")
.replace('__DQ__', '"'))
else:
print(model_run_args)

Expand Down
142 changes: 74 additions & 68 deletions credmark/cmf/engine/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def my_decode(obj):
return json.loads(zlib.decompress(bytes(obj)))


class SqliteDB:
class BasicDB:
# non-deterministic model results
exclude_slugs = [
'rpc.get-latest-blocknumber',
Expand All @@ -75,99 +75,105 @@ class SqliteDB:
'console',
]

_trace = False
_stats = {'total': 0, 'hit': 0, 'miss': 0, 'exclude': 0, 'new': 0}
_enabled = True

def __init__(self):
self._db = {}
self.__stats = {'total': 0, 'hit': 0, 'miss': 0, 'exclude': 0, 'new': 0}
self.__enabled = True
self.__db = {}

self._logger = logging.getLogger(self.__class__.__name__)
self._trace = False

def log_on(self):
self._trace = True
self._logger.setLevel(logging.INFO)

def log_off(self):
self._trace = False
self._logger.setLevel(logging.INFO)

def clear(self, do_clear):
if do_clear:
self.__db.clear()

def encode(self, key):
return hashlib.sha256(key.encode('utf-8')).hexdigest()

def cache_exclude(self):
self._stats['exclude'] += 1
self._stats['total'] += 1
def _cache_exclude(self):
self.__stats['exclude'] += 1
self.__stats['total'] += 1

def cache_hit(self):
self._stats['hit'] += 1
self._stats['total'] += 1
def _cache_hit(self):
self.__stats['hit'] += 1
self.__stats['total'] += 1

def cache_miss(self):
self._stats['miss'] += 1
self._stats['total'] += 1
def _cache_miss(self):
self.__stats['miss'] += 1
self.__stats['total'] += 1

def cache_new(self):
self._stats['new'] += 1
self._stats['total'] += 1
def _cache_new(self):
self.__stats['new'] += 1
self.__stats['total'] += 1

def enable(self):
self._enabled = True
self.__enabled = True

def disable(self):
self._enabled = False
self.__enabled = False

@property
def enabled(self):
return self._enabled


class ModelRunCache(SqliteDB):
def __init__(self, enabled=True):
super().__init__()
self._enabled = enabled
return self.__enabled

@property
def stats(self):
return self._stats
return self.__stats

def __getitem__(self, key):
return self._db.__getitem__(key)
return self.__db.__getitem__(key)

def __setitem__(self, key, value):
return self._db.__setitem__(key, value)
return self.__db.__setitem__(key, value)

def __delitem__(self, key):
return self._db.__delitem__(key)

def log_on(self):
self._trace = True
self._logger.setLevel(logging.INFO)

def log_off(self):
self._trace = False
self._logger.setLevel(logging.INFO)

def clear(self, do_clear):
if do_clear:
self._db.clear()
return self.__db.__delitem__(key)

def keys(self):
yield from self._db.keys()
yield from self.__db.keys()

def items(self):
yield from self._db.items()
yield from self.__db.items()

def __iter__(self):
yield from self._db.__iter__()
yield from self.__db.__iter__()

def values(self):
yield from self._db.values()
yield from self.__db.values()

def __len__(self):
return self._db.__len__()
return self.__db.__len__()

def _get_cache(self, *args, **kwargs):
return self.__db.get(*args, **kwargs)


class ModelRunCache(BasicDB):
def __init__(self, enabled=True):
super().__init__()
if enabled:
self.enable()
else:
self.disable()

def slugs(self,
is_v: Callable[[Any], bool] = (lambda _: True)
) -> Generator[Tuple[str, str, int, str], None, None]:
for k, v in self._db.items():
for k, v in self.items():
if is_v(v):
yield (v['slug'], v['version'], v['block_number'], k)

def block_numbers(self):
block_numbers = set()
for v in self._db.values():
for v in self.values():
if v['block_number'] not in block_numbers:
block_numbers.add(v['block_number'])
yield v['block_number']
Expand All @@ -189,25 +195,25 @@ def encode_run_key(self, chain_id, block_number, slug, version, input):

def get(self, chain_id, block_number,
slug, version, input) -> \
Tuple[Optional[str], Tuple[Optional[Dict], Optional[Dict], Dict]]:
if not self._enabled:
return None, ({}, None, {})
Optional[Tuple[str, Optional[Dict], Optional[Dict], Dict]]:
if not self.enabled:
return None

if slug in self.exclude_slugs:
self.cache_exclude()
return None, ({}, None, {})
self._cache_exclude()
return None

key = self.encode_run_key(chain_id, block_number, slug, version, input)
needle = self._db.get(key, None)
cache_key = self.encode_run_key(chain_id, block_number, slug, version, input)
needle = self._get_cache(cache_key, None)
if needle is None:
if needle is None:
if self._trace:
self._logger.info(f'[{self.__class__.__name__}] Not found: '
f'{chain_id}/{block_number}/{(slug, version)}/[{input}]')
self.cache_miss()
return None, ({}, None, {})
self._cache_miss()
return None

self.cache_hit()
self._cache_hit()
if self._trace:
self._logger.info(f'[{self.__class__.__name__}] Found: '
f'{chain_id}/{block_number}/{(slug, version)}/{input}'
Expand All @@ -222,16 +228,16 @@ def get(self, chain_id, block_number,
output = needle['output']
errors = needle.get('errors')
depends = needle.get('dependencies', {})
return key, (output.copy() if output is not None else None,
errors.copy() if errors is not None else None,
depends.copy())
return (cache_key, output.copy() if output is not None else None,
errors.copy() if errors is not None else None,
depends.copy())

def put(self, chain_id, block_number, slug, version, input, output, dependencies, errors=None):
if not self._enabled or slug in self.exclude_slugs:
if not self.enabled or slug in self.exclude_slugs:
return

key = self.encode_run_key(chain_id, block_number, slug, version, input)
if key in self._db:
if key in self:
if self._trace:
self._logger.info('No case for overwriting cache: '
f'{chain_id}/{block_number}/{(slug, version)}/{input}/')
Expand All @@ -249,11 +255,11 @@ def put(self, chain_id, block_number, slug, version, input, output, dependencies
if self._trace:
self._logger.info(result)

self.cache_new()
self._db[key] = result
self._cache_new()
self[key] = result

def get_contract(self, address, chain_id=0):
return self.get(chain_id, 0,
'contract.metadata',
None,
{"contractAddress": address.lower()})[1][2]
{"contractAddress": address.lower()})
20 changes: 8 additions & 12 deletions credmark/cmf/engine/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,7 @@
create_instance_from_error_dict,
)
from credmark.cmf.model.models import RunModelMethod
from credmark.cmf.types import (
BlockNumber,
BlockNumberOutOfRangeDetailDTO,
BlockNumberOutOfRangeError,
Network
)
from credmark.cmf.types import BlockNumber, BlockNumberOutOfRangeDetailDTO, BlockNumberOutOfRangeError, Network
from credmark.dto import DTOType, DTOValidationError
from credmark.dto.encoder import json_dumps
from credmark.dto.transform import (
Expand Down Expand Up @@ -686,16 +681,16 @@ def _run_model_with_class(self, # pylint: disable=too-many-locals,too-many-argu
version_requested = version
input_as_dict = transform_dto_to_dict(input)

in_cache, cached_output = (
(None, {}) if self.__model_cache is None
in_cache = (
None if self.__model_cache is None
else self.__model_cache.get(self.chain_id, int(run_block_number),
slug, version_requested, input_as_dict))

if in_cache is not None:
if debug_log:
self.debug_logger.debug(
f'cached from remote, {slug, version_requested, input_as_dict}')
output, error, dependencies = cached_output
_cache_key, output, error, dependencies = in_cache
else:
# We pass depth - 1 which is the callers depth
# since we already incremented for this model run request
Expand Down Expand Up @@ -806,8 +801,8 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man

input_as_dict = transform_dto_to_dict(input)

in_cache, cached_output = (
(None, {}) if self.__model_cache is None
in_cache = (
None if self.__model_cache is None
else self.__model_cache.get(context.chain_id,
int(context.block_number),
model_class.slug,
Expand All @@ -817,7 +812,7 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man
if debug_log:
self.debug_logger.debug(
f'cached from local {model_class.slug, model_class.version, input_as_dict}')
output, _error, _dependencies = cached_output
_cache_key, output, _error, _dependencies = in_cache
else:
ModelContext.set_current_context(context)
context.is_active = True
Expand Down Expand Up @@ -937,6 +932,7 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man
self._add_dependency(slug, version, 1)

if in_cache is None and self.__model_cache is not None:
# Save to cache
output_as_dict = transform_dto_to_dict(output)
self.__model_cache.put(context.chain_id,
int(context.block_number),
Expand Down
Loading

0 comments on commit 4137c28

Please sign in to comment.