diff --git a/HISTORY.rst b/HISTORY.rst index 9f4bb87..77315ca 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,6 +2,11 @@ History ------- +0.8.113 [2023-05-29] +* feat: support for dry run and test model selection in test_all_models function +* feat: updated token data for BSC and Polygon networks +* chore: clean up cache object + 0.8.112 [2023-05-26] * feat: enable ledger for BSC diff --git a/credmark/cmf/credmark_dev.py b/credmark/cmf/credmark_dev.py index 3246009..86d83f5 100755 --- a/credmark/cmf/credmark_dev.py +++ b/credmark/cmf/credmark_dev.py @@ -182,6 +182,7 @@ def main(): # pylint: disable=too-many-statements help='comma-separated list of prefixes to match slug to run') parser_test_all.add_argument('-s', '--skip-test', required=False, default=None, help='comma-separated list of prefixes to match slug to skip') + parser_test_all.add_argument('-r', '--dry-run', action='store_true', help='Dry run') add_run_arg(parser_test_all) add_api_url_arg(parser_test_all) parser_test_all.add_argument('--run_id', help=argparse.SUPPRESS, required=False, default=None) @@ -669,6 +670,7 @@ def test_all_models(args): model_prefix = None if get_prefix is None else get_prefix.split(',') get_skip_test = args.get('skip_test') model_prefix_skip = None if get_skip_test is None else get_skip_test.split(',') + get_dry_run = args.get('dry_run') input_chain_id = args.get('chain_id') input_block_number = args.get('block_number') @@ -692,13 +694,45 @@ def test_all_models(args): model_args['model-slug'] = v has_slug = True if model_prefix is not None: + false_by_other = False for pre in model_prefix: - if v.startswith(pre): + if pre.endswith('+'): + pre = pre[:-2] + chr(ord(pre[-2]) + 1) + if v >= pre and not false_by_other: + skip_test = False + else: + skip_test = True + false_by_other = True + elif pre.endswith('-'): + pre = pre[:-2] + chr(ord(pre[-2]) - 1) + if v <= pre and not false_by_other: + skip_test = False + else: + skip_test = True + false_by_other = True + elif v.startswith(pre): skip_test = False + break # print(f'Running {v} with {pre}') + if model_prefix_skip is not None: for pre in model_prefix_skip: - if v.startswith(pre): + true_by_other = False + if pre.endswith('+'): + pre = pre[:-2] + chr(ord(pre[-2]) + 1) + if v >= pre and not true_by_other: + skip_test = True + else: + skip_test = False + true_by_other = True + elif pre.endswith('-'): + pre = pre[:-2] + chr(ord(pre[-2]) - 1) + if v <= pre and not true_by_other: + skip_test = True + else: + skip_test = False + true_by_other = True + elif v.startswith(pre): skip_test = True break if has_input: @@ -737,6 +771,10 @@ def test_all_models(args): if skip_test: continue + if get_dry_run: + print(model_args['model-slug']) + continue + _m_end_time = datetime.now() if get_manifests: @@ -761,8 +799,10 @@ def test_all_models(args): } if format_json: - print(json_dumps(model_run_args, indent=4).replace( - '\\n', '\n').replace('\\"', '\'')) + print(json_dumps(model_run_args, indent=4) + .replace('\\"', '__DQ__') + .replace('"', "'") + .replace('__DQ__', '"')) else: print(model_run_args) diff --git a/credmark/cmf/engine/cache.py b/credmark/cmf/engine/cache.py index b43b8e5..57178ac 100644 --- a/credmark/cmf/engine/cache.py +++ b/credmark/cmf/engine/cache.py @@ -65,7 +65,7 @@ def my_decode(obj): return json.loads(zlib.decompress(bytes(obj))) -class SqliteDB: +class BasicDB: # non-deterministic model results exclude_slugs = [ 'rpc.get-latest-blocknumber', @@ -75,99 +75,105 @@ class SqliteDB: 'console', ] - _trace = False - _stats = {'total': 0, 'hit': 0, 
'miss': 0, 'exclude': 0, 'new': 0} - _enabled = True - def __init__(self): - self._db = {} + self.__stats = {'total': 0, 'hit': 0, 'miss': 0, 'exclude': 0, 'new': 0} + self.__enabled = True + self.__db = {} + self._logger = logging.getLogger(self.__class__.__name__) + self._trace = False + + def log_on(self): + self._trace = True + self._logger.setLevel(logging.INFO) + + def log_off(self): + self._trace = False + self._logger.setLevel(logging.INFO) + + def clear(self, do_clear): + if do_clear: + self.__db.clear() def encode(self, key): return hashlib.sha256(key.encode('utf-8')).hexdigest() - def cache_exclude(self): - self._stats['exclude'] += 1 - self._stats['total'] += 1 + def _cache_exclude(self): + self.__stats['exclude'] += 1 + self.__stats['total'] += 1 - def cache_hit(self): - self._stats['hit'] += 1 - self._stats['total'] += 1 + def _cache_hit(self): + self.__stats['hit'] += 1 + self.__stats['total'] += 1 - def cache_miss(self): - self._stats['miss'] += 1 - self._stats['total'] += 1 + def _cache_miss(self): + self.__stats['miss'] += 1 + self.__stats['total'] += 1 - def cache_new(self): - self._stats['new'] += 1 - self._stats['total'] += 1 + def _cache_new(self): + self.__stats['new'] += 1 + self.__stats['total'] += 1 def enable(self): - self._enabled = True + self.__enabled = True def disable(self): - self._enabled = False + self.__enabled = False @property def enabled(self): - return self._enabled - - -class ModelRunCache(SqliteDB): - def __init__(self, enabled=True): - super().__init__() - self._enabled = enabled + return self.__enabled @property def stats(self): - return self._stats + return self.__stats def __getitem__(self, key): - return self._db.__getitem__(key) + return self.__db.__getitem__(key) def __setitem__(self, key, value): - return self._db.__setitem__(key, value) + return self.__db.__setitem__(key, value) def __delitem__(self, key): - return self._db.__delitem__(key) - - def log_on(self): - self._trace = True - self._logger.setLevel(logging.INFO) - - def log_off(self): - self._trace = False - self._logger.setLevel(logging.INFO) - - def clear(self, do_clear): - if do_clear: - self._db.clear() + return self.__db.__delitem__(key) def keys(self): - yield from self._db.keys() + yield from self.__db.keys() def items(self): - yield from self._db.items() + yield from self.__db.items() def __iter__(self): - yield from self._db.__iter__() + yield from self.__db.__iter__() def values(self): - yield from self._db.values() + yield from self.__db.values() def __len__(self): - return self._db.__len__() + return self.__db.__len__() + + def _get_cache(self, *args, **kwargs): + return self.__db.get(*args, **kwargs) + + +class ModelRunCache(BasicDB): + def __init__(self, enabled=True): + super().__init__() + if enabled: + self.enable() + else: + self.disable() def slugs(self, is_v: Callable[[Any], bool] = (lambda _: True) ) -> Generator[Tuple[str, str, int, str], None, None]: - for k, v in self._db.items(): + for k, v in self.items(): if is_v(v): yield (v['slug'], v['version'], v['block_number'], k) def block_numbers(self): block_numbers = set() - for v in self._db.values(): + for v in self.values(): if v['block_number'] not in block_numbers: block_numbers.add(v['block_number']) yield v['block_number'] @@ -189,25 +195,25 @@ def encode_run_key(self, chain_id, block_number, slug, version, input): def get(self, chain_id, block_number, slug, version, input) -> \ - Tuple[Optional[str], Tuple[Optional[Dict], Optional[Dict], Dict]]: - if not self._enabled: - return None, ({}, None, {}) + 
Optional[Tuple[str, Optional[Dict], Optional[Dict], Dict]]: + if not self.enabled: + return None if slug in self.exclude_slugs: - self.cache_exclude() - return None, ({}, None, {}) + self._cache_exclude() + return None - key = self.encode_run_key(chain_id, block_number, slug, version, input) - needle = self._db.get(key, None) + cache_key = self.encode_run_key(chain_id, block_number, slug, version, input) + needle = self._get_cache(cache_key, None) if needle is None: if needle is None: if self._trace: self._logger.info(f'[{self.__class__.__name__}] Not found: ' f'{chain_id}/{block_number}/{(slug, version)}/[{input}]') - self.cache_miss() - return None, ({}, None, {}) + self._cache_miss() + return None - self.cache_hit() + self._cache_hit() if self._trace: self._logger.info(f'[{self.__class__.__name__}] Found: ' f'{chain_id}/{block_number}/{(slug, version)}/{input}' @@ -222,16 +228,16 @@ def get(self, chain_id, block_number, output = needle['output'] errors = needle.get('errors') depends = needle.get('dependencies', {}) - return key, (output.copy() if output is not None else None, - errors.copy() if errors is not None else None, - depends.copy()) + return (cache_key, output.copy() if output is not None else None, + errors.copy() if errors is not None else None, + depends.copy()) def put(self, chain_id, block_number, slug, version, input, output, dependencies, errors=None): - if not self._enabled or slug in self.exclude_slugs: + if not self.enabled or slug in self.exclude_slugs: return key = self.encode_run_key(chain_id, block_number, slug, version, input) - if key in self._db: + if key in self: if self._trace: self._logger.info('No case for overwriting cache: ' f'{chain_id}/{block_number}/{(slug, version)}/{input}/') @@ -249,11 +255,11 @@ def put(self, chain_id, block_number, slug, version, input, output, dependencies if self._trace: self._logger.info(result) - self.cache_new() - self._db[key] = result + self._cache_new() + self[key] = result def get_contract(self, address, chain_id=0): return self.get(chain_id, 0, 'contract.metadata', None, - {"contractAddress": address.lower()})[1][2] + {"contractAddress": address.lower()}) diff --git a/credmark/cmf/engine/context.py b/credmark/cmf/engine/context.py index b5bb493..c962996 100644 --- a/credmark/cmf/engine/context.py +++ b/credmark/cmf/engine/context.py @@ -29,12 +29,7 @@ create_instance_from_error_dict, ) from credmark.cmf.model.models import RunModelMethod -from credmark.cmf.types import ( - BlockNumber, - BlockNumberOutOfRangeDetailDTO, - BlockNumberOutOfRangeError, - Network -) +from credmark.cmf.types import BlockNumber, BlockNumberOutOfRangeDetailDTO, BlockNumberOutOfRangeError, Network from credmark.dto import DTOType, DTOValidationError from credmark.dto.encoder import json_dumps from credmark.dto.transform import ( @@ -686,8 +681,8 @@ def _run_model_with_class(self, # pylint: disable=too-many-locals,too-many-argu version_requested = version input_as_dict = transform_dto_to_dict(input) - in_cache, cached_output = ( - (None, {}) if self.__model_cache is None + in_cache = ( + None if self.__model_cache is None else self.__model_cache.get(self.chain_id, int(run_block_number), slug, version_requested, input_as_dict)) @@ -695,7 +690,7 @@ def _run_model_with_class(self, # pylint: disable=too-many-locals,too-many-argu if debug_log: self.debug_logger.debug( f'cached from remote, {slug, version_requested, input_as_dict}') - output, error, dependencies = cached_output + _cache_key, output, error, dependencies = in_cache else: # We pass depth - 
1 which is the callers depth # since we already incremented for this model run request @@ -806,8 +801,8 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man input_as_dict = transform_dto_to_dict(input) - in_cache, cached_output = ( - (None, {}) if self.__model_cache is None + in_cache = ( + None if self.__model_cache is None else self.__model_cache.get(context.chain_id, int(context.block_number), model_class.slug, @@ -817,7 +812,7 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man if debug_log: self.debug_logger.debug( f'cached from local {model_class.slug, model_class.version, input_as_dict}') - output, _error, _dependencies = cached_output + _cache_key, output, _error, _dependencies = in_cache else: ModelContext.set_current_context(context) context.is_active = True @@ -937,6 +932,7 @@ def _run_local_model_with_class(self, # pylint: disable=too-many-locals,too-man self._add_dependency(slug, version, 1) if in_cache is None and self.__model_cache is not None: + # Save to cache output_as_dict = transform_dto_to_dict(output) self.__model_cache.put(context.chain_id, int(context.block_number), diff --git a/credmark/cmf/model/models.py b/credmark/cmf/model/models.py index 980257d..095133a 100644 --- a/credmark/cmf/model/models.py +++ b/credmark/cmf/model/models.py @@ -66,6 +66,8 @@ def __call__(self, version: Union[str, None] = None, **kwargs) -> Union[dict, DTOType]: + slug = f"{self.__prefix.replace('_', '-')}" + model_input = {} if self.__input is not None: if isinstance(self.__input, DTOTypesTuple): @@ -84,7 +86,7 @@ def __call__(self, input = model_input | kwargs return self.__context.run_model( - f"{self.__prefix.replace('_', '-')}", + slug, input, block_number=self.__block_number, version=version, @@ -165,6 +167,7 @@ class ModelResultOutput(DTO): class Models: """ + Models class under context """ def __init__(self, context, block_number: Union[int, None] = None, local: bool = False, diff --git a/credmark/cmf/types/adt.py b/credmark/cmf/types/adt.py index ae67848..547307b 100644 --- a/credmark/cmf/types/adt.py +++ b/credmark/cmf/types/adt.py @@ -73,6 +73,9 @@ def to_dataframe(self, fields: Optional[List[Tuple[str, Callable]]] = None): return pd.DataFrame(data_dict) + def sorted(self, key=None, reverse=False): + return sorted(self.some, key=key, reverse=reverse) + class Records(GenericDTO): records: List[Tuple] = DTOField([]) diff --git a/credmark/cmf/types/data/fungible_token_data_bsc.py b/credmark/cmf/types/data/fungible_token_data_bsc.py index fbf659c..240b9d2 100644 --- a/credmark/cmf/types/data/fungible_token_data_bsc.py +++ b/credmark/cmf/types/data/fungible_token_data_bsc.py @@ -23,6 +23,12 @@ 'name': 'USD Coin', 'address': '0x8AC76a51cc950d9822D68b83fE1Ad97B32Cd580d', }, + 'ATOM': { + 'symbol': 'ATOM', + 'decimals': 18, + 'name': 'Cosmos Token', + 'address': '0x0eb3a705fc54725037cc9e008bdede697f62f335', + }, 'WBNB': { 'symbol': 'WBNB', 'decimals': 18, diff --git a/credmark/cmf/types/data/fungible_token_data_polygon.py b/credmark/cmf/types/data/fungible_token_data_polygon.py index 921f359..7a4c0d5 100644 --- a/credmark/cmf/types/data/fungible_token_data_polygon.py +++ b/credmark/cmf/types/data/fungible_token_data_polygon.py @@ -24,6 +24,12 @@ "name": "miMATIC", 'address': '0xa3Fa99A148fA48D14Ed51d610c367C61876997F1', }, + "ATOM": { + "symbol": "ATOM", + "decimals": 6, + "name": "Cosmos (PoS)", + 'address': '0xac51C4c48Dc3116487eD4BC16542e27B5694Da1b', + }, "DPI": { "symbol": "DPI", "decimals": 18,
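
The test-selection change in test_all_models is easiest to read as a standalone sketch. The helper names below (slug_selected, _bump_last_char) are illustrative and not part of the repo; the sketch mirrors the rules as written in this diff: a plain prefix keeps slugs that start with it, a prefix ending in '+' keeps slugs that sort after the prefix block (the trailing marker is dropped and the preceding character is bumped up one code point to form the bound), and a prefix ending in '-' keeps slugs that sort before it. The ordering flags in the diff (false_by_other / true_by_other) are simplified away here, so this conveys the intent rather than being a drop-in equivalent.

from typing import Iterable, Optional


def _bump_last_char(prefix: str, delta: int) -> str:
    # 'uni+' -> 'unj', 'uni-' -> 'unh': drop the trailing +/- marker and
    # shift the character before it by one code point to form a range bound.
    return prefix[:-2] + chr(ord(prefix[-2]) + delta)


def slug_selected(slug: str,
                  prefixes: Optional[Iterable[str]] = None,
                  skip_prefixes: Optional[Iterable[str]] = None) -> bool:
    """Simplified mirror of the --prefix / --skip-test rules in this diff."""
    selected = prefixes is None  # no --prefix filter means everything runs
    if prefixes is not None:
        for pre in prefixes:
            if pre.endswith('+'):
                # 'uni+' keeps slugs that sort at or after 'unj',
                # i.e. everything after the 'uni...' block.
                selected = slug >= _bump_last_char(pre, +1)
            elif pre.endswith('-'):
                # 'uni-' keeps slugs that sort at or before 'unh',
                # i.e. everything before the 'uni...' block.
                selected = slug <= _bump_last_char(pre, -1)
            else:
                selected = slug.startswith(pre)
            if selected:
                break
    if selected and skip_prefixes is not None:
        for pre in skip_prefixes:
            if pre.endswith('+'):
                skip = slug >= _bump_last_char(pre, +1)
            elif pre.endswith('-'):
                skip = slug <= _bump_last_char(pre, -1)
            else:
                skip = slug.startswith(pre)
            if skip:
                selected = False
                break
    return selected


if __name__ == '__main__':
    slugs = ['aave.lending-pool', 'uniswap-v2.get-pool', 'zksync.tvl']
    # Imitates the new -r/--dry-run flag: print what would run, do not run it.
    for s in slugs:
        if slug_selected(s, prefixes=['uniswap']):
            print(s)

The cache refactor also changes the shape of ModelRunCache.get(): it now returns None (disabled cache, excluded slug, or miss) or a (cache_key, output, errors, dependencies) tuple on a hit, instead of the old (key, (output, errors, depends)) pair. Below is a minimal, hypothetical caller pattern matching the unpacking now used in context.py; the run_fn callback standing in for the actual model run is an assumption for illustration only.

from typing import Any, Callable, Dict, Optional, Tuple


def run_with_cache(model_cache,              # ModelRunCache instance or None
                   chain_id: int, block_number: int,
                   slug: str, version: Optional[str],
                   input_as_dict: Dict[str, Any],
                   run_fn: Callable[[Dict[str, Any]],
                                    Tuple[Optional[Dict], Optional[Dict], Dict]]):
    # get() returns None on a miss, or (cache_key, output, errors, dependencies).
    in_cache = None if model_cache is None else model_cache.get(
        chain_id, block_number, slug, version, input_as_dict)
    if in_cache is not None:
        _cache_key, output, errors, dependencies = in_cache
        return output, errors, dependencies
    # Miss: run the model, then store the result with the put() signature
    # shown in this diff (errors defaults to None).
    output, errors, dependencies = run_fn(input_as_dict)
    if model_cache is not None:
        model_cache.put(chain_id, block_number, slug, version,
                        input_as_dict, output, dependencies, errors)
    return output, errors, dependencies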