Skip to content

Commit

Permalink
Merge ada5f67 into d00e80d
Browse files Browse the repository at this point in the history
  • Loading branch information
bogdandm committed Jul 13, 2021
2 parents d00e80d + ada5f67 commit c2e74d4
Show file tree
Hide file tree
Showing 8 changed files with 2,095 additions and 30 deletions.
70 changes: 52 additions & 18 deletions json_to_models/cli.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import argparse
import configparser
import importlib
import itertools
import json
Expand All @@ -10,6 +11,14 @@
from pathlib import Path
from typing import Any, Callable, Dict, Generator, Iterable, List, Tuple, Type, Union

try:
import yaml
except ImportError:
try:
import ruamel.yaml as yaml
except ImportError:
yaml = None

from . import __version__ as VERSION
from .dynamic_typing import ModelMeta, register_datetime_classes
from .generator import MetadataGenerator
Expand Down Expand Up @@ -80,6 +89,7 @@ def parse_args(self, args: List[str] = None):
(model_name, (lookup, Path(path)))
for model_name, lookup, path in namespace.list or ()
]
parser = getattr(FileLoaders, namespace.input_format)
self.output_file = namespace.output
self.enable_datetime = namespace.datetime
disable_unicode_conversion = namespace.disable_unicode_conversion
Expand All @@ -94,7 +104,7 @@ def parse_args(self, args: List[str] = None):
dict_keys_fields: List[str] = namespace.dict_keys_fields

self.validate(models, models_lists, merge_policy, framework, code_generator)
self.setup_models_data(models, models_lists)
self.setup_models_data(models, models_lists, parser)
self.set_args(merge_policy, structure, framework, code_generator, code_generator_kwargs_raw,
dict_keys_regex, dict_keys_fields, disable_unicode_conversion)

Expand Down Expand Up @@ -157,16 +167,20 @@ def validate(self, models, models_list, merge_policy, framework, code_generator)
elif framework != 'custom' and code_generator is not None:
raise ValueError("--code-generator argument has no effect without '--framework custom' argument")

def setup_models_data(self, models: Iterable[Tuple[str, Iterable[Path]]],
models_lists: Iterable[Tuple[str, Tuple[str, Path]]]):
def setup_models_data(
self,
models: Iterable[Tuple[str, Iterable[Path]]],
models_lists: Iterable[Tuple[str, Tuple[str, Path]]],
parser: 'FileLoaders.T'
):
"""
Initialize lazy loaders for models data
"""
models_dict: Dict[str, List[Iterable[dict]]] = defaultdict(list)
for model_name, paths in models:
models_dict[model_name].append(map(safe_json_load, paths))
models_dict[model_name].append(parser(path) for path in paths)
for model_name, (lookup, path) in models_lists:
models_dict[model_name].append(iter_json_file(path, lookup))
models_dict[model_name].append(iter_json_file(parser(path), lookup))

self.models_data = {
model_name: itertools.chain(*list_of_gen)
Expand Down Expand Up @@ -252,6 +266,12 @@ def _create_argparser(cls) -> argparse.ArgumentParser:
"I.e. for file that contains dict {\"a\": {\"b\": [model_data, ...]}} you should\n"
"pass 'a.b' as <JSON key>.\n\n"
)
parser.add_argument(
"-i", "--input-format",
metavar="FORMAT", default="json",
choices=['json', 'yaml', 'ini'],
help="Input files parser ('PyYaml' is required to parse yaml files)\n\n"
)
parser.add_argument(
"-o", "--output",
metavar="FILE", default="",
Expand Down Expand Up @@ -385,7 +405,31 @@ def path_split(path: str) -> List[str]:
return folders


def dict_lookup(d: dict, lookup: str) -> Union[dict, list]:
class FileLoaders:
    """
    Namespace of input-file parsers. Each loader takes a Path and returns
    the parsed document; the active loader is selected by the
    ``--input-format`` CLI argument (``getattr(FileLoaders, name)``).
    """
    # Common signature shared by every loader below.
    T = Callable[[Path], Union[dict, list]]

    @staticmethod
    def json(path: Path) -> Union[dict, list]:
        """
        Load a JSON document from ``path``.

        Opened explicitly as UTF-8 so behavior does not depend on the
        platform locale (matches the old ``safe_json_load`` helper this
        loader replaced).
        """
        with path.open(encoding="utf-8") as fp:
            return json.load(fp)

    @staticmethod
    def yaml(path: Path) -> Union[dict, list]:
        """
        Load a YAML document from ``path``.

        :raises ImportError: if neither PyYaml nor ruamel.yaml is installed
            (the module-level ``yaml`` fallback import resolved to None).
        """
        if yaml is None:
            # Carry the explanation in the exception itself instead of
            # printing to stdout and raising a bare ImportError('yaml').
            raise ImportError(
                'Yaml parser is not installed. To parse yaml files PyYaml (or ruamel.yaml) is required.'
            )
        with path.open(encoding="utf-8") as fp:
            return yaml.safe_load(fp)

    @staticmethod
    def ini(path: Path) -> dict:
        """
        Load an INI file from ``path`` as a plain nested dict:
        ``{section: {option: value}}`` (all values are strings).
        """
        config = configparser.ConfigParser()
        with path.open(encoding="utf-8") as fp:
            config.read_file(fp)
        return {s: dict(config.items(s)) for s in config.sections()}


def dict_lookup(d: Union[dict, list], lookup: str) -> Union[dict, list]:
"""
Extract nested dictionary value from key path.
If lookup is "-" returns dict as is.
Expand All @@ -403,7 +447,7 @@ def dict_lookup(d: dict, lookup: str) -> Union[dict, list]:
return d


def iter_json_file(path: Path, lookup: str) -> Generator[Union[dict, list], Any, None]:
def iter_json_file(data: Union[dict, list], lookup: str) -> Generator[Union[dict, list], Any, None]:
"""
Loads given 'path' file, perform lookup and return generator over json list.
Does not open file until iteration is started.
Expand All @@ -412,21 +456,11 @@ def iter_json_file(path: Path, lookup: str) -> Generator[Union[dict, list], Any,
:param lookup: Dot separated lookup path
:return:
"""
with path.open() as f:
l = json.load(f)
l = dict_lookup(l, lookup)
l = dict_lookup(data, lookup)
assert isinstance(l, list), f"Dict lookup return {type(l)} but list is expected, check your lookup path"
yield from l


def safe_json_load(path: Path) -> Union[dict, list]:
    """Read the file at ``path`` as UTF-8 and return its parsed JSON content."""
    raw = path.read_text(encoding="utf-8")
    return json.loads(raw)


def _process_path(path: str) -> Iterable[Path]:
"""
Convert path pattern into path iterable.
Expand Down
5 changes: 4 additions & 1 deletion json_to_models/dynamic_typing/complex.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,10 @@ def to_typing_code(self, types_style: Dict[Union['BaseType', Type['BaseType']],
if options.get(self.TypeStyle.use_literals):
limit = options.get(self.TypeStyle.max_literals)
if limit is None or len(self.literals) < limit:
parts = ', '.join(f'"{s}"' for s in sorted(self.literals))
parts = ', '.join(
'"{}"'.format(s.replace('\\', '\\\\').replace('"', '\\"'))
for s in sorted(self.literals)
)
return [(Literal.__module__, 'Literal')], f"Literal[{parts}]"

return [], 'str'
Expand Down
2 changes: 1 addition & 1 deletion json_to_models/models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

keywords_set = set(keyword.kwlist)
builtins_set = set(__builtins__.keys())
other_common_names_set = {'datetime', 'time', 'date', 'defaultdict'}
other_common_names_set = {'datetime', 'time', 'date', 'defaultdict', 'schema'}
blacklist_words = frozenset(keywords_set | builtins_set | other_common_names_set)
ones = ['', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']

Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,6 @@ def run_tests(self):
},
install_requires=required,
cmdclass={"test": PyTest},
tests_require=["pytest>=4.4.0", "pytest-xdist", "requests", "attrs", "pydantic>=1.3"],
tests_require=["pytest>=4.4.0", "pytest-xdist", "requests", "attrs", "pydantic>=1.3", "PyYaml"],
data_files=[('', ['requirements.txt', 'pytest.ini', '.coveragerc', 'LICENSE', 'README.md', 'CHANGELOG.md'])]
)
9 changes: 9 additions & 0 deletions test/test_cli/data/file.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
[owner]
name = John Doe
organization = Acme Widgets Inc.

[database]
; use IP address in case network name resolution is not working
server = 192.0.2.62
port = 143
file = "payroll.dat"

0 comments on commit c2e74d4

Please sign in to comment.