diff --git a/.gitignore b/.gitignore
index d45d1aac..52d86b48 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ venv36
 coverage.xml
 docs/source/_*
 __pycache__
+__open_alchemy_*_cache__
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 508baef6..8564df95 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,7 +9,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Fixed
 
-- Removed unnecessary imports in `__init__.py` files.
+- Removed unnecessary imports in `__init__.py` files. [#255]
+
+### Added
+
+- Caching validation results to speed up startup. [#251]
 
 ## [v2.1.0] - 2020-12-20
 
@@ -496,3 +500,5 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 [#201]: https://github.com/jdkandersson/OpenAlchemy/issues/201
 [#202]: https://github.com/jdkandersson/OpenAlchemy/issues/202
 [#236]: https://github.com/jdkandersson/OpenAlchemy/issues/236
+[#251]: https://github.com/jdkandersson/OpenAlchemy/issues/251
+[#255]: https://github.com/jdkandersson/OpenAlchemy/issues/255
diff --git a/open_alchemy/__init__.py b/open_alchemy/__init__.py
index 43ab2c91..96905cfd 100644
--- a/open_alchemy/__init__.py
+++ b/open_alchemy/__init__.py
@@ -64,7 +64,7 @@ def init_model_factory(
     schemas = components.get("schemas", {})
 
     # Pre-processing schemas
-    _schemas_module.process(schemas=schemas)
+    _schemas_module.process(schemas=schemas, spec_filename=spec_path)
 
     # Getting artifacts
     schemas_artifacts = _schemas_artifacts.get_from_schemas(
diff --git a/open_alchemy/build/MANIFEST.j2 b/open_alchemy/build/MANIFEST.j2
index 03ae166a..5946843d 100644
--- a/open_alchemy/build/MANIFEST.j2
+++ b/open_alchemy/build/MANIFEST.j2
@@ -1,3 +1,3 @@
-recursive-include {{ name }} *.json
+recursive-include {{ name }} *.json __open_alchemy_*_cache__
 remove .*
 
diff --git a/open_alchemy/build/__init__.py b/open_alchemy/build/__init__.py
index e0d0e016..029cb194 100644
--- a/open_alchemy/build/__init__.py
+++ b/open_alchemy/build/__init__.py
@@ -10,6 +10,7 @@
 
 import jinja2
 
+from .. import cache
 from .. import exceptions
 from .. import models_file as models_file_module
 from .. import schemas as schemas_module
@@ -336,7 +337,9 @@ def dump(
         # Write files in the package directory.
         package = directory / name
         package.mkdir(parents=True, exist_ok=True)
-        (package / "spec.json").write_text(spec_str)
+        spec_file = package / "spec.json"
+        spec_file.write_text(spec_str)
+        cache.schemas_are_valid(str(spec_file))
         (package / "__init__.py").write_text(init)
     except OSError as exc:
         raise exceptions.BuildError(str(exc)) from exc
diff --git a/open_alchemy/build/setup.j2 b/open_alchemy/build/setup.j2
index 08493879..b045ed64 100644
--- a/open_alchemy/build/setup.j2
+++ b/open_alchemy/build/setup.j2
@@ -4,7 +4,7 @@ setuptools.setup(
     name="{{ name }}",
     version="{{ version }}",
     packages=setuptools.find_packages(),
-    python_requires=">=3.6",
+    python_requires=">=3.7",
     install_requires=[
         "OpenAlchemy",
     ],
diff --git a/open_alchemy/cache.py b/open_alchemy/cache.py
new file mode 100644
index 00000000..effc24fb
--- /dev/null
+++ b/open_alchemy/cache.py
@@ -0,0 +1,173 @@
+"""
+Cache for OpenAlchemy.
+
+The name of the file is:
+__open_alchemy_<hash of the spec file name>_cache__
+
+The structure of the file is:
+
+{
+    "hash": "<hash of the spec file contents>",
+    "data": {
+        "schemas": {
+            "valid": true/false
+        }
+    }
+}
+"""
+
+import hashlib
+import json
+import pathlib
+import shutil
+
+from . import exceptions
+
+
+def calculate_hash(value: str) -> str:
+    """Create hash of a value."""
+    sha256 = hashlib.sha256()
+    sha256.update(value.encode())
+    return sha256.hexdigest()
+
+
+def calculate_cache_path(path: pathlib.Path) -> pathlib.Path:
+    """
+    Calculate the name of the cache file.
+
+    Args:
+        path: The path to the spec file.
+
+    Returns:
+        The path to the cache file.
+
+    """
+    return path.parent / f"__open_alchemy_{calculate_hash(path.name)}_cache__"
+
+
+_HASH_KEY = "hash"
+_DATA_KEY = "data"
+_DATA_SCHEMAS_KEY = "schemas"
+_DATA_SCHEMAS_VALID_KEY = "valid"
+
+
+def schemas_valid(filename: str) -> bool:
+    """
+    Calculate whether the cache indicates that the schemas in the file are valid.
+
+    Algorithm:
+    1. If the cache file does not exist, return False.
+    2. If the cache file is actually a folder, return False.
+    3. If the spec file is actually a folder, return False.
+    4. If the spec file does not exist, return False.
+    5. Calculate the hash of the spec file contents.
+    6. Try to load the cache, if it fails or it is not a dictionary, return False.
+    7. Try to retrieve the hash key, if it does not exist, return False.
+    8. If the value of the hash key is different to the hash of the file, return False.
+    9. Look for the data.schemas.valid key, if it does not exist, return False.
+    10. If the value of data.schemas.valid is True return True, otherwise return False.
+
+    Args:
+        filename: The name of the OpenAPI specification file.
+
+    Returns:
+        Whether the cache indicates that the schemas in the file are valid.
+
+    """
+    path = pathlib.Path(filename)
+    cache_path = calculate_cache_path(path)
+
+    # Check that both the spec file and the cache exist and are files
+    if (
+        not path.exists()
+        or not path.is_file()
+        or not cache_path.exists()
+        or not cache_path.is_file()
+    ):
+        return False
+
+    file_hash = calculate_hash(path.read_text())
+
+    try:
+        cache = json.loads(cache_path.read_text())
+    except json.JSONDecodeError:
+        return False
+
+    cache_valid = (
+        isinstance(cache, dict)
+        and _HASH_KEY in cache
+        and _DATA_KEY in cache
+        and isinstance(cache[_DATA_KEY], dict)
+        and _DATA_SCHEMAS_KEY in cache[_DATA_KEY]
+        and isinstance(cache[_DATA_KEY][_DATA_SCHEMAS_KEY], dict)
+        and _DATA_SCHEMAS_VALID_KEY in cache[_DATA_KEY][_DATA_SCHEMAS_KEY]
+    )
+    if not cache_valid:
+        return False
+
+    cache_file_hash = cache[_HASH_KEY]
+    if file_hash != cache_file_hash:
+        return False
+
+    return cache[_DATA_KEY][_DATA_SCHEMAS_KEY][_DATA_SCHEMAS_VALID_KEY] is True
+
+
+def schemas_are_valid(filename: str) -> None:
+    """
+    Update the cache to indicate that the filename is valid.
+
+    Algorithm:
+    1. If the spec filename is actually a folder, raise a CacheError.
+    2. If the spec filename does not exist, raise a CacheError.
+    3. Calculate the hash of the spec file contents.
+    4. If the cache is actually a folder, delete the folder.
+    5. If the cache does not exist, create the cache.
+    6. Read the contents of the cache. If it is not a dictionary, throw the contents
+        away and create an empty dictionary.
+    7. Create or update the hash key in the cache dictionary to be the calculated value.
+    8. Look for the data key in the cache dictionary. If it does not exist or is not a
+        dictionary, make it an empty dictionary.
+    9. Look for the schemas key under data in the cache dictionary. If it does not exist
+        or is not a dictionary, set it to be an empty dictionary.
+    10. Create or update the valid key under data.schemas and set it to True.
+    11. Write the dictionary to the file as JSON.
+
+    Args:
+        filename: The name of the spec file.
+
+    """
+    path = pathlib.Path(filename)
+    if not path.exists():
+        raise exceptions.CacheError(
+            f"the spec file does not exist, filename={filename}"
+        )
+    if not path.is_file():
+        raise exceptions.CacheError(f"the spec file is not a file, filename={filename}")
+    file_hash = calculate_hash(path.read_text())
+
+    cache_path = calculate_cache_path(path)
+    if cache_path.exists() and not cache_path.is_file():
+        shutil.rmtree(cache_path)
+    if not cache_path.exists():
+        cache_path.write_text("", encoding="utf-8")
+
+    try:
+        cache = json.loads(cache_path.read_text())
+    except json.JSONDecodeError:
+        cache = {}
+    if not isinstance(cache, dict):
+        cache = {}
+
+    cache[_HASH_KEY] = file_hash
+
+    if _DATA_KEY not in cache or not isinstance(cache[_DATA_KEY], dict):
+        cache[_DATA_KEY] = {}
+    cache_data = cache[_DATA_KEY]
+    if _DATA_SCHEMAS_KEY not in cache_data or not isinstance(
+        cache_data[_DATA_SCHEMAS_KEY], dict
+    ):
+        cache_data[_DATA_SCHEMAS_KEY] = {}
+    cache_data_schemas = cache_data[_DATA_SCHEMAS_KEY]
+    cache_data_schemas[_DATA_SCHEMAS_VALID_KEY] = True
+
+    cache_path.write_text(json.dumps(cache), encoding="utf-8")
diff --git a/open_alchemy/exceptions.py b/open_alchemy/exceptions.py
index 8b47a65e..681b7ab3 100644
--- a/open_alchemy/exceptions.py
+++ b/open_alchemy/exceptions.py
@@ -74,3 +74,7 @@ class BuildError(BaseError):
 
 class CLIError(BaseError):
     """Raised when an error occurs when the CLI is used."""
+
+
+class CacheError(BaseError):
+    """Raised when an error occurs when the cache is used."""
diff --git a/open_alchemy/schemas/__init__.py b/open_alchemy/schemas/__init__.py
index 1f833a07..54b343e5 100644
--- a/open_alchemy/schemas/__init__.py
+++ b/open_alchemy/schemas/__init__.py
@@ -1,5 +1,7 @@
 """Performs operations on the schemas to prepare them for further processing."""
 
+import typing
+
 from .. import types as _types
 from . import association
 from . import backref
@@ -7,7 +9,9 @@
 from . import validation
 
 
-def process(*, schemas: _types.Schemas) -> None:
+def process(
+    *, schemas: _types.Schemas, spec_filename: typing.Optional[str] = None
+) -> None:
     """
     Pre-process schemas.
 
@@ -18,7 +22,7 @@ def process(*, schemas: _types.Schemas) -> None:
         schemas: The schemas to pre-process in place.
 
     """
-    validation.process(schemas=schemas)
+    validation.process(schemas=schemas, spec_filename=spec_filename)
     backref.process(schemas=schemas)
     foreign_key.process(schemas=schemas)
     association.process(schemas=schemas)
diff --git a/open_alchemy/schemas/validation/__init__.py b/open_alchemy/schemas/validation/__init__.py
index 4c60a203..a9631f4b 100644
--- a/open_alchemy/schemas/validation/__init__.py
+++ b/open_alchemy/schemas/validation/__init__.py
@@ -2,6 +2,7 @@
 
 import typing
 
+from ... import cache
 from ... import exceptions as _exceptions
 from ... import types as _oa_types
 from ..helpers import iterate
@@ -87,14 +88,21 @@ def _other_schemas_checks(*, schemas: _oa_types.Schemas) -> types.Result:
     return types.Result(valid=True, reason=None)
 
 
-def process(*, schemas: _oa_types.Schemas) -> None:
+def process(
+    *, schemas: _oa_types.Schemas, spec_filename: typing.Optional[str] = None
+) -> None:
     """
     Validate schemas.
 
     Args:
         schemas: The schemas to validate.
+        spec_filename: The filename of the spec, used to cache the result.
""" + if spec_filename is not None: + if cache.schemas_valid(spec_filename): + return + schemas_result = schemas_validation.check(schemas=schemas) if not schemas_result.valid: raise _exceptions.MalformedSchemaError(schemas_result.reason) @@ -121,6 +129,9 @@ def process(*, schemas: _oa_types.Schemas) -> None: if not other_results_result.valid: raise _exceptions.MalformedSchemaError(other_results_result.reason) + if spec_filename is not None: + cache.schemas_are_valid(spec_filename) + def check_one_model(*, schemas: _oa_types.Schemas) -> types.Result: """ diff --git a/setup.cfg b/setup.cfg index 2e76ed4c..32bb2670 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,6 +27,7 @@ markers = utility_base validation validate + cache python_functions = test_* mocked-sessions = examples.app.database.db.session flake8-max-line-length = 88 diff --git a/tests/open_alchemy/integration/test_init.py b/tests/open_alchemy/integration/test_init.py index d7c776b7..dd204f86 100644 --- a/tests/open_alchemy/integration/test_init.py +++ b/tests/open_alchemy/integration/test_init.py @@ -8,6 +8,7 @@ import yaml import open_alchemy +from open_alchemy import cache from open_alchemy.facades.sqlalchemy import types as sqlalchemy_types @@ -236,6 +237,9 @@ def test_init_json(engine, sessionmaker, tmp_path): queried_model = session.query(model).first() assert queried_model.column == value + # Checking for cache + assert cache.schemas_valid(str(spec_file)) is True + @pytest.mark.integration def test_init_json_remote(engine, sessionmaker, tmp_path, _clean_remote_schemas_store): @@ -315,6 +319,9 @@ def test_init_yaml(engine, sessionmaker, tmp_path): queried_model = session.query(model).first() assert queried_model.column == value + # Checking for cache + assert cache.schemas_valid(str(spec_file)) is True + @pytest.mark.integration def test_init_yaml_remote(engine, sessionmaker, tmp_path, _clean_remote_schemas_store): diff --git a/tests/open_alchemy/schemas/validation/test_validation.py b/tests/open_alchemy/schemas/validation/test_validation.py index 642518fe..f345a0e0 100644 --- a/tests/open_alchemy/schemas/validation/test_validation.py +++ b/tests/open_alchemy/schemas/validation/test_validation.py @@ -1,5 +1,7 @@ """Tests for validation rules.""" +import pathlib + import pytest from open_alchemy import exceptions @@ -259,6 +261,27 @@ def test_process(schemas, raises): validation.process(schemas=schemas) +def test_process_cache(tmpdir): + """ + GIVEN spec filename + WHEN process is called with the schemas and filename twice + THEN the schemas are not checked on the second run. 
+    """
+    tmpdir_path = pathlib.Path(tmpdir)
+    spec_file = tmpdir_path / "spec.json"
+    spec_file.write_text("spec 1", encoding="utf-8")
+    schemas = {
+        "Schema1": {
+            "type": "object",
+            "x-tablename": "schema_1",
+            "properties": {"prop_1": {"type": "integer"}},
+        }
+    }
+
+    validation.process(schemas=schemas, spec_filename=str(spec_file))
+    validation.process(schemas={}, spec_filename=str(spec_file))
+
+
 CHECK_TESTS = [
     pytest.param(
         True,
diff --git a/tests/open_alchemy/test_build.py b/tests/open_alchemy/test_build.py
index 4f394b9f..8dd44408 100644
--- a/tests/open_alchemy/test_build.py
+++ b/tests/open_alchemy/test_build.py
@@ -3,6 +3,7 @@
 import pytest
 
 from open_alchemy import build
+from open_alchemy import cache
 from open_alchemy import exceptions
 from open_alchemy.helpers import command
 
@@ -221,7 +222,7 @@ def test_generate_setup():
     name="name 1",
     version="version 1",
     packages=setuptools.find_packages(),
-    python_requires=">=3.6",
+    python_requires=">=3.7",
     install_requires=[
         "OpenAlchemy",
     ],
@@ -243,7 +244,7 @@ def test_generate_manifest():
 
     returned_contents = build.generate_manifest(name=name)
 
-    expected_contents = """recursive-include name 1 *.json
+    expected_contents = """recursive-include name 1 *.json __open_alchemy_*_cache__
 remove .*
 """
 
@@ -444,6 +445,11 @@ def test_dump(tmp_path):
     with open(expected_spec_path) as in_file:
         assert in_file.read() == spec_str
 
+    # Check cache
+    expected_cache_path = cache.calculate_cache_path(expected_spec_path)
+    assert expected_cache_path.is_file()
+    assert cache.schemas_valid(str(expected_spec_path))
+
     # Check init file
     expected_init_path = package_path / "__init__.py"
     assert expected_init_path.is_file()
@@ -691,6 +697,11 @@ def test_execute(tmp_path, package_format, extensions):
     assert "Schema" in spec_file_contents
     assert "x-tablename" in spec_file_contents
 
+    # Check cache
+    expected_cache_path = cache.calculate_cache_path(expected_spec_path)
+    assert expected_cache_path.is_file()
+    assert cache.schemas_valid(str(expected_spec_path))
+
     # Check init file
     expected_init_path = package_path / "__init__.py"
     assert expected_init_path.is_file()
diff --git a/tests/open_alchemy/test_cache.py b/tests/open_alchemy/test_cache.py
new file mode 100644
index 00000000..23a51321
--- /dev/null
+++ b/tests/open_alchemy/test_cache.py
@@ -0,0 +1,359 @@
+"""Tests for the cache."""
+
+import json
+import pathlib
+
+import pytest
+
+from open_alchemy import cache
+from open_alchemy import exceptions
+
+
+@pytest.mark.parametrize(
+    "spec_path, expected_cache_path",
+    [
+        pytest.param(
+            pathlib.Path("some.file"),
+            pathlib.Path(f"__open_alchemy_{cache.calculate_hash('some.file')}_cache__"),
+            id="json file",
+        ),
+        pytest.param(
+            pathlib.Path("parent/some.file"),
+            pathlib.Path(
+                f"parent/__open_alchemy_{cache.calculate_hash('some.file')}_cache__"
+            ),
+            id="file in subfolder",
+        ),
+    ],
+)
+@pytest.mark.cache
+def test_calculate_cache_path(spec_path, expected_cache_path):
+    """
+    GIVEN spec path
+    WHEN calculate_cache_path is called with the spec path
+    THEN the expected path is returned.
+    """
+    returned_path = cache.calculate_cache_path(spec_path)
+
+    assert str(returned_path) == str(expected_cache_path)
+
+
+@pytest.mark.cache
+def test_schemas_valid_spec_file_not_exists(tmpdir):
+    """
+    GIVEN spec file that does not exist and cache with contents
+    WHEN schemas_valid is called with the filename
+    THEN False is returned.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_file = path_tmpdir / "spec.json"
+    cache_file = path_tmpdir / "__open_alchemy_spec_json_cache__"
+    cache_file.write_text(
+        "cache 1",
+        encoding="utf-8",
+    )
+
+    returned_result = cache.schemas_valid(str(spec_file))
+
+    assert returned_result is False
+
+
+@pytest.mark.cache
+def test_schemas_valid_spec_file_is_folder(tmpdir):
+    """
+    GIVEN spec file that is actually a folder and cache with contents
+    WHEN schemas_valid is called with the filename
+    THEN False is returned.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_file = path_tmpdir / "spec.json"
+    spec_file.mkdir()
+    cache_file = path_tmpdir / "__open_alchemy_spec_json_cache__"
+    cache_file.write_text(
+        "cache 1",
+        encoding="utf-8",
+    )
+
+    returned_result = cache.schemas_valid(str(spec_file))
+
+    assert returned_result is False
+
+
+@pytest.mark.cache
+def test_schemas_valid_cache_file_missing(tmpdir):
+    """
+    GIVEN spec file with contents and cache that does not exist
+    WHEN schemas_valid is called with the filename
+    THEN False is returned.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_file = path_tmpdir / "spec.json"
+    spec_file.write_text("spec 1", encoding="utf-8")
+
+    returned_result = cache.schemas_valid(str(spec_file))
+
+    assert returned_result is False
+
+
+@pytest.mark.cache
+def test_schemas_valid_cache_is_folder(tmpdir):
+    """
+    GIVEN spec file with contents and cache that is a folder
+    WHEN schemas_valid is called with the filename
+    THEN False is returned.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_file = path_tmpdir / "spec.json"
+    spec_file.write_text("spec 1", encoding="utf-8")
+    cache_file = path_tmpdir / "__open_alchemy_spec_json_cache__"
+    cache_file.mkdir()
+
+    returned_result = cache.schemas_valid(str(spec_file))
+
+    assert returned_result is False
+
+
+@pytest.mark.parametrize(
+    "spec_contents, cache_contents, expected_result",
+    [
+        pytest.param("spec 1", "", False, id="empty"),
+        pytest.param("spec 1", "invalid JSON", False, id="not json"),
+        pytest.param("spec 1", "true", False, id="not dictionary"),
+        pytest.param("spec 1", json.dumps({}), False, id="empty dictionary"),
+        pytest.param(
+            "spec 1",
+            json.dumps({"data": {"schemas": {"valid": True}}}),
+            False,
+            id="hash missing data schemas valid True",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps({"hash": None, "data": {"schemas": {"valid": True}}}),
+            False,
+            id="hash not string data schemas valid True",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {
+                    "hash": cache.calculate_hash("spec 2"),
+                    "data": {"schemas": {"valid": True}},
+                }
+            ),
+            False,
+            id="hash different data schemas valid True",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps({"hash": cache.calculate_hash("spec 1")}),
+            False,
+            id="hash same data missing",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps({"hash": cache.calculate_hash("spec 1"), "data": None}),
+            False,
+            id="hash same data not dict",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps({"hash": cache.calculate_hash("spec 1"), "data": {}}),
+            False,
+            id="hash same data schemas missing",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {"hash": cache.calculate_hash("spec 1"), "data": {"schemas": None}}
+            ),
+            False,
+            id="hash same data schemas not dict",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {"hash": cache.calculate_hash("spec 1"), "data": {"schemas": {}}}
+            ),
+            False,
+            id="hash same data schemas valid missing",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {
+                    "hash": cache.calculate_hash("spec 1"),
+                    "data": {"schemas": {"valid": None}},
+                }
+            ),
+            False,
+            id="hash same data schemas valid not boolean",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {
+                    "hash": cache.calculate_hash("spec 1"),
+                    "data": {"schemas": {"valid": False}},
+                }
+            ),
+            False,
+            id="hash same data schemas valid False",
+        ),
+        pytest.param(
+            "spec 1",
+            json.dumps(
+                {
+                    "hash": cache.calculate_hash("spec 1"),
+                    "data": {"schemas": {"valid": True}},
+                }
+            ),
+            True,
+            id="hash same data schemas valid True",
+        ),
+    ],
+)
+@pytest.mark.cache
+def test_schemas_valid(tmpdir, spec_contents, cache_contents, expected_result):
+    """
+    GIVEN spec file with contents and cache with contents
+    WHEN schemas_valid is called with the filename
+    THEN the expected result is returned.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+    spec_file.write_text(spec_contents, encoding="utf-8")
+    cache_file = (
+        path_tmpdir / f"__open_alchemy_{cache.calculate_hash(spec_filename)}_cache__"
+    )
+    cache_file.write_text(
+        cache_contents,
+        encoding="utf-8",
+    )
+
+    returned_result = cache.schemas_valid(str(spec_file))
+
+    assert returned_result == expected_result
+
+
+@pytest.mark.cache
+def test_schemas_are_valid_spec_missing(tmpdir):
+    """
+    GIVEN spec is missing
+    WHEN schemas_are_valid is called with the spec filename
+    THEN CacheError is raised.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+
+    with pytest.raises(exceptions.CacheError):
+        cache.schemas_are_valid(str(spec_file))
+
+
+@pytest.mark.cache
+def test_schemas_are_valid_spec_not_file(tmpdir):
+    """
+    GIVEN spec is actually a folder
+    WHEN schemas_are_valid is called with the spec filename
+    THEN CacheError is raised.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+    spec_file.mkdir()
+
+    with pytest.raises(exceptions.CacheError):
+        cache.schemas_are_valid(str(spec_file))
+
+
+@pytest.mark.cache
+def test_schemas_are_valid(tmpdir):
+    """
+    GIVEN spec in a file and the cache is actually a folder
+    WHEN schemas_are_valid is called with the spec filename
+    THEN schemas_valid returns True if it is called afterwards.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+    spec_file.write_text("spec 1", encoding="utf-8")
+    cache_file = (
+        path_tmpdir / f"__open_alchemy_{cache.calculate_hash(spec_filename)}_cache__"
+    )
+    cache_file.mkdir()
+    cache_sub_file = cache_file / "some.file"
+    cache_sub_file.write_text("some contents")
+
+    cache.schemas_are_valid(str(spec_file))
+
+    assert cache.schemas_valid(str(spec_file)) is True
+
+
+@pytest.mark.cache
+def test_schemas_are_valid_cache_folder(tmpdir):
+    """
+    GIVEN spec in a file
+    WHEN schemas_are_valid is called with the spec filename
+    THEN schemas_valid returns True if it is called afterwards.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+    spec_file.write_text("spec 1", encoding="utf-8")
+
+    cache.schemas_are_valid(str(spec_file))
+
+    assert cache.schemas_valid(str(spec_file)) is True
+
+
+@pytest.mark.parametrize(
+    "cache_contents",
+    [
+        pytest.param("", id="empty"),
+        pytest.param("not valid JSON", id="invalid JSON"),
+        pytest.param(json.dumps(True), id="not dict"),
+        pytest.param(json.dumps({}), id="empty dict"),
+        pytest.param(json.dumps({"hash": None}), id="hash exists"),
+        pytest.param(json.dumps({"data": None}), id="data exists not dict"),
+        pytest.param(json.dumps({"data": {}}), id="data exists empty"),
+        pytest.param(
+            json.dumps({"data": {"schemas": None}}), id="data schema not dict"
+        ),
+        pytest.param(json.dumps({"data": {"schemas": {}}}), id="data schema empty"),
+        pytest.param(
+            json.dumps({"data": {"schemas": {"valid": None}}}),
+            id="data schema valid not boolean",
+        ),
+        pytest.param(
+            json.dumps({"data": {"schemas": {"valid": False}}}),
+            id="data schema valid False",
+        ),
+        pytest.param(
+            json.dumps({"data": {"schemas": {"valid": True}}}),
+            id="data schema valid True",
+        ),
+    ],
+)
+@pytest.mark.cache
+def test_schemas_are_valid_cache_exists(tmpdir, cache_contents):
+    """
+    GIVEN spec in a file and a cache file with the given contents
+    WHEN schemas_are_valid is called with the spec filename
+    THEN schemas_valid returns True if it is called afterwards.
+    """
+    path_tmpdir = pathlib.Path(tmpdir)
+    spec_filename = "spec.json"
+    spec_file = path_tmpdir / spec_filename
+    spec_file.write_text("spec 1", encoding="utf-8")
+    cache_file = (
+        path_tmpdir / f"__open_alchemy_{cache.calculate_hash(spec_filename)}_cache__"
+    )
+    cache_file.write_text(
+        cache_contents,
+        encoding="utf-8",
+    )
+
+    cache.schemas_are_valid(str(spec_file))
+
+    assert cache.schemas_valid(str(spec_file)) is True
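Illustration only, not part of the patch: a minimal sketch of how the caching introduced here is expected to behave end to end, based on the open_alchemy.cache functions and the validation.process change above and mirroring the new test_process_cache test. The spec path and contents are made up for the example.

import json
import pathlib

from open_alchemy import cache
from open_alchemy.schemas import validation

# A made-up spec file on disk; OpenAlchemy normally gets this path from init_yaml/init_json.
spec_file = pathlib.Path("example_spec.json")
spec_file.write_text("spec 1", encoding="utf-8")

schemas = {
    "Schema1": {
        "type": "object",
        "x-tablename": "schema_1",
        "properties": {"prop_1": {"type": "integer"}},
    }
}

# First run: no cache exists yet, so the schemas are fully checked and the result is
# recorded in a cache file next to the spec.
assert cache.schemas_valid(str(spec_file)) is False
validation.process(schemas=schemas, spec_filename=str(spec_file))

# The cache file name is derived from the hash of the spec file name; its JSON payload
# stores the hash of the spec contents plus the validation flag.
cache_file = cache.calculate_cache_path(spec_file)
assert cache_file.is_file()
assert json.loads(cache_file.read_text())["data"]["schemas"]["valid"] is True

# Second run: the stored hash still matches the spec contents, so the checks are
# skipped entirely (even an empty schemas mapping is accepted, as in the new test).
validation.process(schemas={}, spec_filename=str(spec_file))

# Editing the spec changes its content hash, which invalidates the cached result;
# calling process without a spec_filename bypasses the cache altogether.
spec_file.write_text("spec 2", encoding="utf-8")
assert cache.schemas_valid(str(spec_file)) is False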