diff --git a/.circleci/config.yml b/.circleci/config.yml index 89b77a9..c488b0e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,67 +1,76 @@ version: 2.1 jobs: - test-35: + test-34: docker: - - image: circleci/python:3.5 + - image: circleci/python:3.4 environment: &std_env TERM: xterm LANG: en_US.UTF-8 + PIP_DISABLE_PIP_VERSION_CHECK: 1 working_directory: ~/json-syntax steps: + &steps34 - checkout - run: name: Set up virtualenv command: | - pip install poetry - poetry install + pip install --user 'poetry>=1' + python -m poetry install - run: name: Run tests command: | - poetry run pytest tests/ + python -m poetry run pytest tests/ - store_artifacts: # If a property test fails, this contains the example that failed. path: ".hypothesis" destination: ".hypothesis" + test-35: + docker: + - image: circleci/python:3.5 + environment: *std_env + steps: *steps34 + working_directory: ~/json-syntax test-36: docker: - image: circleci/python:3.6 environment: *std_env + working_directory: ~/json-syntax steps: - &std_steps + &steps36 - checkout - run: name: Set up virtualenv command: | - pip install poetry - poetry install + pip install --user 'poetry>=1' + python -m poetry install - run: name: Run tests command: | - poetry run pytest --doctest-modules json_syntax/ tests/ + python -m poetry run pytest --doctest-modules json_syntax/ tests/ - store_artifacts: # If a property test fails, this contains the example that failed. path: ".hypothesis" destination: ".hypothesis" - working_directory: ~/json-syntax test-37: docker: - image: circleci/python:3.7 environment: *std_env - steps: *std_steps + steps: *steps36 working_directory: ~/json-syntax test-38: docker: - image: circleci/python:3.8 environment: *std_env - steps: *std_steps + steps: *steps36 working_directory: ~/json-syntax workflows: test: jobs: + - test-34 - test-35 - test-36 - test-37 diff --git a/.gitignore b/.gitignore index 2b645a9..255ae39 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ setup.py requirements.txt .tox/ +README.rst diff --git a/README.md b/README.md index 4ede494..e7f5726 100644 --- a/README.md +++ b/README.md @@ -168,8 +168,8 @@ Thus we have: * `dict` and `Dict[K, V]` Tuple is a special case. In Python, they're often used to mean "frozenlist", so -`Tuple[E, ...]` (the `...` is [the Ellipsis object][ellipsis]) indicates all elements have the type -`E`. +`Tuple[E, ...]` (the `...` is [the Ellipsis object][ellipsis]) indicates all elements have +the type `E`. They're also used to represent an unnamed record. In this case, you can use `Tuple[A, B, C, D]` or however many types. It's generally better to use a `dataclass`. @@ -180,6 +180,24 @@ The standard rules don't support: 2. Using type variables. 3. Any kind of callable, coroutine, file handle, etc. +#### Support for deriving from Generic + +There is experimental support for deriving from `typing.Generic`. An `attrs` or `dataclass` +may declare itself a generic class. If another class invokes it as `YourGeneric[Param, +Param]`, those `Param` types will be substituted into the fields during encoding. This is +useful to construct parameterized container types. Example: + + @attr.s(auto_attribs=True) + class Wrapper(Generic[T, M]): + body: T + count: int + messages: List[M] + + @attr.s(auto_attribs=True) + class Message: + first: Wrapper[str, str] + second: Wrapper[Dict[str, str], int] + #### Unions A union type lets you present alternate types that the converters will attempt in @@ -347,28 +365,19 @@ This package is maintained via the [poetry][] tool. 
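For the `Generic` support described above, a minimal usage sketch (a sketch only: it assumes `std_ruleset` handles the `Wrapper` class from that example, and the encoded shape shown is inferred rather than taken from this diff):

    from json_syntax import std_ruleset

    rules = std_ruleset()
    encode = rules.python_to_json(Wrapper[str, str])
    encode(Wrapper(body='hi', count=1, messages=['a', 'b']))
    # -> {'body': 'hi', 'count': 1, 'messages': ['a', 'b']}
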
Some useful commands: 1. Setup: `poetry install` 2. Run tests: `poetry run pytest tests/` - 3. Reformat: `poetry run black json_syntax/ tests/` - -### Setting up tox - -You'll want pyenv, then install the pythons: - - curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py - pyenv install --list | egrep '^ *3\.[4567]|^ *pypy3.5' - # figure out what versions you want - for v in 3.4.9 3.5.10 ...; do - pyenv install $v - PYENV_VERSION=$v python get-pip.py - done + 3. Reformat: `black json_syntax/ tests/` + 4. Generate setup.py: `dephell deps convert -e setup` + 5. Generate requirements.txt: `dephell deps convert -e req` -Once you install `tox` in your preferred python, running it is just `tox`. (Note: this is -largely redundant as the build is configured to all the different pythons on Circle.) +### Running tests via docker -### Contributor roll call +The environments for 3.4 through 3.9 are in `pyproject.toml`, so just run: -* @bsamuel-ui -- Ben Samuel -* @dschep -* @rugheid + dephell deps convert -e req # Create requirements.txt + dephell docker run -e test34 pip install -r requirements.txt + dephell docker run -e test34 pytest tests/ + dephell docker shell -e test34 pytest tests/ + dephell docker destroy -e test34 ### Notes diff --git a/json_syntax/__init__.py b/json_syntax/__init__.py index 1faec05..aa9b343 100644 --- a/json_syntax/__init__.py +++ b/json_syntax/__init__.py @@ -1,6 +1,6 @@ """ -The JSON syntax library is a combinatorial parser / generator library for managing conversion of Python objects to and -from common JSON types. +The JSON syntax library is a combinatorial parser / generator library for managing +conversion of Python objects to and from common JSON types. It's not strictly limited to JSON, but that's the major use case. """ @@ -39,9 +39,11 @@ def std_ruleset( cache=None, ): """ - Constructs a RuleSet with the provided rules. The arguments here are to make it easy to override. + Constructs a RuleSet with the provided rules. The arguments here are to make it easy to + override. - For example, to replace ``decimals`` with ``decimals_as_str`` just call ``std_ruleset(decimals=decimals_as_str)`` + For example, to replace ``decimals`` with ``decimals_as_str`` just call + ``std_ruleset(decimals=decimals_as_str)`` """ return custom( enums, @@ -59,5 +61,5 @@ def std_ruleset( stringify_keys, unions, *extras, - cache=cache, + cache=cache ) diff --git a/json_syntax/attrs.py b/json_syntax/attrs.py index 9d4876e..4201216 100644 --- a/json_syntax/attrs.py +++ b/json_syntax/attrs.py @@ -10,6 +10,7 @@ ) from . import pattern as pat from .product import build_attribute_map, build_named_tuple_map, build_typed_dict_map +from .types import is_generic, get_origin, get_argument_map from functools import partial @@ -47,7 +48,13 @@ def attrs_classes( """ if verb not in _SUPPORTED_VERBS: return - inner_map = build_attribute_map(verb, typ, ctx) + if is_generic(typ): + typ_args = get_argument_map(typ) + typ = get_origin(typ) + else: + typ_args = None + + inner_map = build_attribute_map(verb, typ, ctx, typ_args) if inner_map is None: return @@ -115,11 +122,11 @@ def named_tuples(verb, typ, ctx): def typed_dicts(verb, typ, ctx): """ - Handle the TypedDict product type. This allows you to construct a dict with specific (string) keys, which - is often how people really use dicts. + Handle the TypedDict product type. This allows you to construct a dict with specific + (string) keys, which is often how people really use dicts. 
-    Both the class form and the functional form, ``TypedDict('Name', {'field': type, 'field': type})`` are
-    supported.
+    Both the class form and the functional form,
+    ``TypedDict('Name', {'field': type, 'field': type})`` are supported.
     """
     if verb not in _SUPPORTED_VERBS:
         return
diff --git a/json_syntax/cache.py b/json_syntax/cache.py
index 5dc915f..9565b6c 100644
--- a/json_syntax/cache.py
+++ b/json_syntax/cache.py
@@ -103,7 +103,8 @@ def complete(self, verb, typ, action):
 
 class ThreadLocalCache(SimpleCache):
     """
-    Avoids threads conflicting while looking up rules by keeping the cache in thread local storage.
+    Avoids threads conflicting while looking up rules by keeping the cache in thread local
+    storage.
 
     You can also prevent this by looking up rules during module loading.
     """
diff --git a/json_syntax/errors.py b/json_syntax/errors.py
new file mode 100644
index 0000000..954db99
--- /dev/null
+++ b/json_syntax/errors.py
@@ -0,0 +1,94 @@
+class _Context:
+    """
+    Stash contextual information in an exception. As we don't know exactly when an exception
+    is displayed to a user, this class tries to keep it always up to date.
+
+    This class formats like a string (its ``__str__`` rebuilds the message) and tracks an
+    insertion point.
+    """
+
+    __slots__ = ("original", "context", "lead")
+
+    def __init__(self, original, lead, context):
+        self.original = original
+        self.lead = lead
+        self.context = [context]
+
+    def __str__(self):
+        return "{}{}{}".format(
+            self.original, self.lead, "".join(map(str, reversed(self.context)))
+        )
+
+    def __repr__(self):
+        return repr(self.__str__())
+
+    @classmethod
+    def add(cls, exc, context):
+        args = exc.args
+        if args and isinstance(args[0], cls):
+            args[0].context.append(context)
+            return
+        args = list(exc.args)
+        if args:
+            args[0] = cls(args[0], "; at ", context)
+        else:
+            args.append(cls("", "At ", context))
+        exc.args = tuple(args)
+
+
+class ErrorContext:
+    """
+    Inject contextual information into an exception message. This won't work for some
+    exceptions like OSError that ignore changes to `args`; likely not an issue for this
+    library. There is a negligible performance hit if there is no exception.
+
+    >>> with ErrorContext('.foo'):
+    ...     with ErrorContext('[0]'):
+    ...         with ErrorContext('.qux'):
+    ...             1 / 0
+    Traceback (most recent call last):
+    ZeroDivisionError: division by zero; at .foo[0].qux
+
+    The `__exit__` method will catch the exception and check whether its first argument is
+    already a `_Context`. If not, it appends `; at ` and the context string to the first
+    string argument.
+
+    As the exception walks up the stack, outer ErrorContexts will be called. They will see
+    the existing `_Context` and insert their context immediately after `; at ` and before
+    the existing context.
+
+    Thus, in the example above:
+
+        ('division by zero',)                 -- the original message
+        ('division by zero; at .qux',)        -- the innermost context
+        ('division by zero; at [0].qux',)
+        ('division by zero; at .foo[0].qux',) -- the outermost context
+
+    For simplicity, the method doesn't attempt to inject whitespace. To represent names,
+    consider surrounding them with angle brackets, e.g. ``<name>``
+    """
+
+    def __init__(self, *context):
+        self.context = context
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if exc_value is not None:
+            _Context.add(exc_value, "".join(self.context))
+
+
+def err_ctx(context, func):
+    """
+    Execute a callable, decorating exceptions raised with error context.
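+
+    A doctest-style sketch of the same behavior as the ``ErrorContext`` example above:
+
+    >>> err_ctx('.foo', lambda: 1 / 0)
+    Traceback (most recent call last):
+    ZeroDivisionError: division by zero; at .foo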
+
+    ``err_ctx(context, func)`` has the same effect as:
+
+        with ErrorContext(context):
+            return func()
+    """
+    try:
+        return func()
+    except Exception as exc:
+        _Context.add(exc, context)
+        raise
diff --git a/json_syntax/extras/dynamodb.py b/json_syntax/extras/dynamodb.py
index 1cd6832..4e4825a 100644
--- a/json_syntax/extras/dynamodb.py
+++ b/json_syntax/extras/dynamodb.py
@@ -1,19 +1,22 @@
 """
-While the main suite is fairly complex, it's really not hard to construct a small, useful translation.
+While the main suite is fairly complex, it's really not hard to construct a small, useful
+translation.
 
-AWS's DynamoDB decorates values to represent them in JSON. The rules for the decorations are fairly simple, and we'd
-like to translate to and from Python objects.
+AWS's DynamoDB decorates values to represent them in JSON. The rules for the decorations are
+fairly simple, and we'd like to translate to and from Python objects.
 
 Dynamo values look like this:
 
     {"BOOL": true}
     {"L": [{"N": "1.5"}, {"S": "apple"}]}
 
-We will generate rules to convert Python primitive types, lists and attrs classes into Dynamo types.
+We will generate rules to convert Python primitive types, lists and attrs classes into
+Dynamo types.
 
-This will special case the kinds of sets Dynamo handles. In keeping with the principle of least astonishment,
-it won't convert, e.g. ``Set[MyType]`` into a Dynamo list. This will just fail because Dynamo doesn't actually
-support that. You could add a rule if that's the correct semantics.
+This will special-case the kinds of sets Dynamo handles. In keeping with the principle of
+least astonishment, it won't convert, e.g., ``Set[MyType]`` into a Dynamo list. This will
+just fail because Dynamo doesn't actually support that. You could add a rule if that's the
+correct semantics.
 
 For boto3 users: you must use the **client**, not the resource.
@@ -22,7 +25,7 @@
 
 The ``boto3.resource('dynamodb').Table`` is already doing a conversion step we don't want.
 
-Ref: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html#DDB-Type-AttributeValue-NS
+Ref: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html#DDB-Type-AttributeValue-NS  # noqa
 """
 
 from json_syntax.helpers import (
@@ -64,8 +67,9 @@ def booleans(verb, typ, ctx):
 
 def numbers(verb, typ, ctx):
     """
-    A rule to represent numeric values as Dynamo numbers. Any number type should work, however both Decimal and float
-    support NaN and infinity and I haven't tested these in Dynamo.
+    A rule to represent numeric values as Dynamo numbers. Any number type should work;
+    however, both Decimal and float support NaN and infinity, and I haven't tested these in
+    Dynamo.
     """
     if typ == bool or not issub_safe(typ, (Decimal, Real)):
         return
@@ -146,10 +150,12 @@ def dicts(verb, typ, ctx):
 
 def sets(verb, typ, ctx):
     """
-    A rule to represent sets. Will only use specialized Dynamo sets, to abide by principle of least astonishment.
+    A rule to represent sets. Will only use specialized Dynamo sets, to abide by the
+    principle of least astonishment.
 
-    Valid python types include Set[Decimal], Set[str], Set[bytes], or FrozenSet for any of these. Also, any number that
-    converts from Decimal and converts to a decimal if str is called should work.
+    Valid Python types include Set[Decimal], Set[str], Set[bytes], or FrozenSet for any of
+    these. Also, any number that converts from Decimal and converts to a decimal if str is
+    called should work.
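+
+    A sketch of the decorated forms (per DynamoDB's AttributeValue set types; members
+    are string-encoded):
+
+        Set[str]     -> {"SS": ["a", "b"]}
+        Set[Decimal] -> {"NS": ["1", "10.5"]}
+        Set[bytes]   -> {"BS": [<base64 strings>]}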
""" if not has_origin(typ, (set, frozenset), num_args=1): return @@ -248,7 +254,8 @@ def ddb_item_to_python(self, typ): Usage: rs = dynamodb_ruleset() - response = client.get_item(TableName='my_table', Key=rs.ad_hoc(my_key='some_string')) + response = client.get_item(TableName='my_table', + Key=rs.ad_hoc(my_key='some_string')) decoder = rs.ddb_item_to_python(MyAttrsType) result = decoder(response['Item']) """ @@ -272,7 +279,8 @@ def python_to_ddb_item(self, typ): def ad_hoc(self, _key_prefix="", **kw): """ - Convenience method to encode an ad hoc set of arguments used in various DynamoDB APIs. + Convenience method to encode an ad hoc set of arguments used in various DynamoDB + APIs. If an argument is a tuple, it must be a two-item tuple of ``(value, type)``. If you want to use container types, you'll have to specify them fully. For empty @@ -335,7 +343,7 @@ def dynamodb_ruleset( optionals, nulls, *extras, - cache=cache, + cache=cache ) @@ -431,7 +439,8 @@ def _encode_number(value): if isinstance(value, (int, float, Decimal)): return str(value) else: - # This is all the Real interface guarantees us. It's a stretch using Fraction in Dynamo. + # This is all the Real interface guarantees us. It's a stretch using Fraction in + # Dynamo. return str(float(value)) diff --git a/json_syntax/extras/flags.py b/json_syntax/extras/flags.py index d1933ad..afbd418 100644 --- a/json_syntax/extras/flags.py +++ b/json_syntax/extras/flags.py @@ -12,15 +12,18 @@ class Flag(type): """ An example of a custom type that lets you quickly create string-only flags. - This also demonstrates a technique that makes it possible to create a fake type that can be - used within ``typing.Union``. + This also demonstrates a technique that makes it possible to create a fake type that can + be used within ``typing.Union``. Thanks to __class_getitem__, you can invoke this as ``Flag['foo', 'bar', 'etc']`` but this requires Python 3.7! """ def __new__(cls, *args, **kwds): - """This is necessary to be a subclass of `type`, which is necessary to be used in a Union.""" + """ + This is necessary to be a subclass of `type`, which is necessary to be used in a + Union. + """ return super().__new__(cls, cls.__name__, (), {}) def __init__(self, *elems): @@ -46,10 +49,11 @@ def _check_flag(elems, value): """ Checks that a value is a member of a set of flags. - Note that we use a top-level function and `partial`. The trouble with lambdas or local defs is that they - can't be pickled because they're inaccessible to the unpickler. + Note that we use a top-level function and `partial`. The trouble with lambdas or local + defs is that they can't be pickled because they're inaccessible to the unpickler. - If you don't intend to pickle your encoders, though, they're completely fine to use in rules. + If you don't intend to pickle your encoders, though, they're completely fine to use in + rules. """ return isinstance(value, str) and value in elems @@ -68,7 +72,8 @@ def _convert_flag(elems, value): def flags(*, verb, typ, ctx): """ - A simple rule to allow certain strings as flag values, but without converting them to an actual Enum. + A simple rule to allow certain strings as flag values, but without converting them to an + actual Enum. This rule is triggered with a fake type ``Flag['string', 'string', 'string']``. 
""" diff --git a/json_syntax/extras/loose_dates.py b/json_syntax/extras/loose_dates.py index 7582705..368fc62 100644 --- a/json_syntax/extras/loose_dates.py +++ b/json_syntax/extras/loose_dates.py @@ -7,11 +7,11 @@ """ This example is of working around common date issues. -The standard rules use the standard library's fromisoformat and isoformat methods, to abide by the principle of least -surprise. +The standard rules use the standard library's fromisoformat and isoformat methods, to abide +by the principle of least surprise. -But it's pretty common to have to consume a datetime in a date field, and it may also be the case that you want to -discard the timestamp. +But it's pretty common to have to consume a datetime in a date field, and it may also be the +case that you want to discard the timestamp. (Note: requires python3.7 or greater.) """ diff --git a/json_syntax/helpers.py b/json_syntax/helpers.py index cbe4224..fd02516 100644 --- a/json_syntax/helpers.py +++ b/json_syntax/helpers.py @@ -1,10 +1,14 @@ -from importlib import import_module -import logging -import typing as t -import sys +from .types import ( # noqa + has_origin, + get_origin, + is_generic, + issub_safe, + NoneType, + resolve_fwd_ref, + python_minor, +) +from .errors import ErrorContext, err_ctx # noqa -_eval_type = getattr(t, "_eval_type", None) -logger = logging.getLogger(__name__) JSON2PY = "json_to_python" PY2JSON = "python_to_json" INSP_JSON = "inspect_json" @@ -13,241 +17,8 @@ STR2PY = "string_to_python" PY2STR = "python_to_string" PATTERN = "show_pattern" -NoneType = type(None) SENTINEL = object() -python_minor = sys.version_info[:2] def identity(value): return value - - -def has_origin(typ, origin, num_args=None): - """ - Determines if a concrete class (a generic class with arguments) matches an origin - and has a specified number of arguments. - - The typing classes use dunder properties such that ``__origin__`` is the generic - class and ``__args__`` are the type arguments. - - Note: in python3.7, the ``__origin__`` attribute changed to reflect native types. - This call attempts to work around that so that 3.5 and 3.6 "just work." - """ - t_origin = get_origin(typ) - if not isinstance(origin, tuple): - origin = (origin,) - return t_origin in origin and (num_args is None or len(typ.__args__) == num_args) - - -def get_origin(typ): - """ - Get the origin type of a generic type. For example, List has an "origin type" of list. - """ - try: - t_origin = typ.__origin__ - except AttributeError: - return None - else: - return _origin_pts(t_origin) - - -try: - _Generic = t.GenericMeta -except AttributeError: - _Generic = t._GenericAlias - - -def is_generic(typ): - """ - Return true iff the instance (which should be a type value) is a generic type. 
- - `typing` module notes: - - 3.5: typing.List[int] is an instance of typing._GenericAlias - 3.6, 3.7: typing.List[int] is an instance of typing.GenericMeta - """ - return isinstance(typ, _Generic) - - -if python_minor < (3, 7): - import collections as c - - _map = [ - (t.Tuple, tuple), - (t.List, list), - (t.Dict, dict), - (t.Callable, callable), - (t.Type, type), - (t.Set, set), - (t.FrozenSet, frozenset), - ] - seen = {prov for prov, stable in _map} - from collections import abc - - for name, generic in vars(t).items(): - if not is_generic(generic) or generic in seen: - continue - for check in getattr(abc, name, None), getattr(c, name.lower(), None): - if check: - _map.append((generic, check)) - break - _pts = {prov: stable for prov, stable in _map} - # _stp = {stable: prov for prov, stable in _map} - - def _origin_pts(origin, _pts=_pts): - """ - Convert the __origin__ of a generic type returned by the provisional typing API (python3.5) to the stable - version. - - Don't use this, just use get_origin. - """ - return _pts.get(origin, origin) - - # def _origin_stp(origin, _stp=_stp): - # """ - # Convert the __origin__ of a generic type in the stable typing API (python3.6+) to the provisional version. - # """ - # return _stp.get(origin, origin) - - del _pts - # del _stp - del _map - del seen - del abc - del c -else: - _origin_pts = identity - # _origin_stp = identity - - -def issub_safe(sub, sup): - """ - Safe version of issubclass. Tries to be consistent in handling generic types. - - `typing` module notes: - - 3.5, 3.6: issubclass(t.List[int], list) returns true - 3.7: issubclass(t.List[int], list) raises a TypeError - """ - try: - return not is_generic(sub) and issubclass(sub, sup) - except TypeError: - return False - - -def resolve_fwd_ref(typ, context_class): - """ - Tries to resolve a forward reference given a containing class. This does nothing for - regular types. - """ - resolved = None - try: - namespace = vars(import_module(context_class.__module__)) - except AttributeError: - logger.warning("Couldn't determine module of %r", context_class) - else: - resolved = _eval_type(typ, namespace, {}) - if resolved is None: - return typ - else: - return resolved - - -if _eval_type is None: - # If typing's internal API changes, we have tests that break. - def resolve_fwd_ref(typ, context_class): # noqa - return typ - - -class _Context: - """ - Stash contextual information in an exception. As we don't know exactly when an exception is displayed - to a user, this class tries to keep it always up to date. - - This class subclasses string (to be compatible) and tracks an insertion point. - """ - - __slots__ = ("original", "context", "lead") - - def __init__(self, original, lead, context): - self.original = original - self.lead = lead - self.context = [context] - - def __str__(self): - return "{}{}{}".format( - self.original, self.lead, "".join(map(str, reversed(self.context))) - ) - - def __repr__(self): - return repr(self.__str__()) - - @classmethod - def add(cls, exc, context): - args = exc.args - if args and isinstance(args[0], cls): - args[0].context.append(context) - return - args = list(exc.args) - if args: - args[0] = cls(args[0], "; at ", context) - else: - args.append(cls("", "At ", context)) - exc.args = tuple(args) - - -class ErrorContext: - """ - Inject contextual information into an exception message. This won't work for some exceptions like OSError that - ignore changes to `args`; likely not an issue for this library. 
There is a neglible performance hit if there is - no exception. - - >>> with ErrorContext('.foo'): - ... with ErrorContext('[0]'): - ... with ErrorContext('.qux'): - ... 1 / 0 - Traceback (most recent call last): - ZeroDivisionError: division by zero; at .foo[0].qux - - The `__exit__` method will catch the exception and look for a `_context` attribute assigned to it. If none exists, - it appends `; at ` and the context string to the first string argument. - - As the exception walks up the stack, outer ErrorContexts will be called. They will see the `_context` attribute and - insert their context immediately after `; at ` and before the existing context. - - Thus, in the example above: - - ('division by zero',) -- the original message - ('division by zero; at .qux',) -- the innermost context - ('division by zero; at [0].qux',) - ('division by zero; at .foo[0].qux',) -- the outermost context - - For simplicity, the method doesn't attempt to inject whitespace. To represent names, consider surrounding - them with angle brackets, e.g. `` - """ - - def __init__(self, *context): - self.context = context - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - if exc_value is not None: - _Context.add(exc_value, "".join(self.context)) - - -def err_ctx(context, func): - """ - Execute a callable, decorating exceptions raised with error context. - - ``err_ctx(context, func)`` has the same effect as: - - with ErrorContext(context): - return func() - """ - try: - return func() - except Exception as exc: - _Context.add(exc, context) - raise diff --git a/json_syntax/pattern.py b/json_syntax/pattern.py index ca2e65c..c6ab2cf 100644 --- a/json_syntax/pattern.py +++ b/json_syntax/pattern.py @@ -24,7 +24,8 @@ def _def(obj): class Matches(IntEnum): """ - This determines the degree to which one pattern can shadow another causing potential ambiguity. + This determines the degree to which one pattern can shadow another causing potential + ambiguity. Meaning: @@ -33,11 +34,12 @@ class Matches(IntEnum): * potential: It's not possible to prove the pattern won't shadow the other pattern. * never: The pattern will never shadow the other pattern. - In determining ambiguity, a `sometimes` threshold is often permissible. For example, if you have - `Union[date, str]` then properly formatted dates will sometimes shadow strings. That's probably okay - if you want special handling for dates. + In determining ambiguity, a `sometimes` threshold is often permissible. For example, if + you have `Union[date, str]` then properly formatted dates will sometimes shadow strings. + That's probably okay if you want special handling for dates. - But in `Union[str, date]`, the `str` will always match and thus no dates will ever be recognized. + But in `Union[str, date]`, the `str` will always match and thus no dates will ever be + recognized. """ always = 0 diff --git a/json_syntax/product.py b/json_syntax/product.py index 0126a46..dd6849f 100644 --- a/json_syntax/product.py +++ b/json_syntax/product.py @@ -2,14 +2,15 @@ A module to help with product types in Python. 
""" -from .helpers import issub_safe, resolve_fwd_ref, SENTINEL +from .helpers import SENTINEL +from .types import issub_safe, resolve_fwd_ref, rewrite_typevars _TypedDictMeta = None try: from typing import _TypedDictMeta except ImportError: try: - from typing_extensions import _TypeDictMeta # noqa + from typing_extensions import _TypedDictMeta # noqa except ImportError: pass @@ -31,12 +32,14 @@ class Attribute: """ - Generic class to describe an attribute for a product type that can be represented as, e.g., a JSON map. + Generic class to describe an attribute for a product type that can be represented as, + e.g., a JSON map. - An Attribute is associated with an action, specifically, its "inner" field directs how to process the inside type, - not necessarily what the inside type is. + An Attribute is associated with an action, specifically, its "inner" field directs how + to process the inside type, not necessarily what the inside type is. - See the various build_* commands to generate attribute maps. (These are really just lists of Attribute instances.) + See the various build_* commands to generate attribute maps. (These are really just + lists of Attribute instances.) Fields: name: the attribute name @@ -74,7 +77,7 @@ def is_attrs_field_required(field): return factory in _attrs_missing_values -def attr_map(verb, outer, ctx, gen): +def attr_map(verb, outer, ctx, gen, typ_args=None): result = [] failed = [] for att in gen: @@ -83,6 +86,8 @@ def attr_map(verb, outer, ctx, gen): att.typ = resolve_fwd_ref(att.typ, outer) except TypeError: failed.append("resolve fwd ref {} for {}".format(att.typ, att.name)) + else: + att.typ = rewrite_typevars(att.typ, typ_args) if att.inner is None: att.inner = ctx.lookup( verb=verb, typ=resolve_fwd_ref(att.typ, outer), accept_missing=True @@ -101,11 +106,12 @@ def attr_map(verb, outer, ctx, gen): return tuple(result) -def build_attribute_map(verb, typ, ctx): +def build_attribute_map(verb, typ, ctx, typ_args=None): """ Examine an attrs or dataclass type and construct a list of attributes. - Returns a list of Attribute instances, or None if the type is not an attrs or dataclass type. + Returns a list of Attribute instances, or None if the type is not an attrs or dataclass + type. """ try: fields = typ.__attrs_attrs__ @@ -121,7 +127,7 @@ def build_attribute_map(verb, typ, ctx): verb, typ, ctx, - ( + gen=( Attribute( name=field.name, typ=field.type, @@ -131,6 +137,7 @@ def build_attribute_map(verb, typ, ctx): for field in fields if field.init ), + typ_args=typ_args, ) @@ -167,7 +174,7 @@ def build_named_tuple_map(verb, typ, ctx): verb, typ, ctx, - ( + gen=( Attribute( name=name, typ=inner, @@ -176,6 +183,7 @@ def build_named_tuple_map(verb, typ, ctx): ) for name, inner in fields ), + typ_args=None, # A named tuple type can't accept generic arguments. ) @@ -196,8 +204,9 @@ def build_typed_dict_map(verb, typ, ctx): verb, typ, ctx, - ( + gen=( Attribute(name=name, typ=inner, is_required=True, default=SENTINEL) for name, inner in typ.__annotations__.items() ), + typ_args=None, # A typed dict can't accept generic arguments. ) diff --git a/json_syntax/ruleset.py b/json_syntax/ruleset.py index 9d063a3..5351b76 100644 --- a/json_syntax/ruleset.py +++ b/json_syntax/ruleset.py @@ -22,13 +22,14 @@ class SimpleRuleSet: """ This is the base of RuleSet and doesn't know anything about the standard verbs. - A ruleset contains a series of rules that will be evaluated, in order, against types to attempt to construct - encoders and decoders. 
+ A ruleset contains a series of rules that will be evaluated, in order, against types to + attempt to construct encoders and decoders. It takes a list of rules; functions that accept a verb and type and return actions. - The keyword argument `cache` can specify a custom rule cache. `json_syntax.cache.ThreadLocalCache` may be helpful - if you are loading rules in a multi-threaded environment. + The keyword argument `cache` can specify a custom rule cache. + `json_syntax.cache.ThreadLocalCache` may be helpful if you are loading rules in a + multi-threaded environment. """ def __init__(self, *rules, cache=None): @@ -78,16 +79,17 @@ def fallback(self, verb, typ): class RuleSet(SimpleRuleSet): """ - A ruleset contains a series of rules that will be evaluated, in order, against types to attempt - to construct encoders and decoders. + A ruleset contains a series of rules that will be evaluated, in order, against types to + attempt to construct encoders and decoders. It takes a list of rules; functions that accept a verb and type and return actions. - The keyword argument `cache` can specify a custom rule cache. `json_syntax.cache.ThreadLocalCache` - may be helpful if you are loading rules in a multi-threaded environment. + The keyword argument `cache` can specify a custom rule cache. + `json_syntax.cache.ThreadLocalCache` may be helpful if you are loading rules in a + multi-threaded environment. - The most important methods are generally `json_to_python` and `python_to_json`; these take a - fully specified type and produce an encoder and decoder respectively. + The most important methods are generally `json_to_python` and `python_to_json`; these + take a fully specified type and produce an encoder and decoder respectively. """ def json_to_python(self, typ): diff --git a/json_syntax/std.py b/json_syntax/std.py index 081535d..55e6780 100644 --- a/json_syntax/std.py +++ b/json_syntax/std.py @@ -81,13 +81,15 @@ def floats(verb, typ, ctx): """ Rule to handle floats passing NaNs through unaltered. - JSON technically recognizes integers and floats. Many JSON generators will represent floats with integral value as - integers. Thus, this rule will convert both integers and floats in JSON to floats in Python. + JSON technically recognizes integers and floats. Many JSON generators will represent + floats with integral value as integers. Thus, this rule will convert both integers and + floats in JSON to floats in Python. - Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is not standard JSON. + Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is + not standard JSON. - This rule simply treats them as regular float values. If you want to catch them, you can set ``allow_nan=False`` - in ``json.dump()``. + This rule simply treats them as regular float values. If you want to catch them, you can + set ``allow_nan=False`` in ``json.dump()``. """ if typ == float: if verb in (JSON2PY, PY2JSON): @@ -104,7 +106,8 @@ def floats_nan_str(verb, typ, ctx): """ Rule to handle floats passing NaNs through as strings. - Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is not standard JSON. + Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is + not standard JSON. This rule converts special constants to string names. """ @@ -127,8 +130,8 @@ def decimals(verb, typ, ctx): This rule requires that your JSON library has decimal support, e.g. simplejson. 
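    For example, a sketch assuming simplejson (its ``dumps`` serializes Decimal natively,
    and ``loads(..., use_decimal=True)`` parses numbers back into Decimal):

        import simplejson
        from decimal import Decimal

        simplejson.dumps({"price": Decimal("1.10")})  # '{"price": 1.10}'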
- Other JSON processors may convert values to and from floating-point; if that's a concern, consider - `decimals_as_str`. + Other JSON processors may convert values to and from floating-point; if that's a + concern, consider `decimals_as_str`. This rule will fail if passed a special constant. """ @@ -167,8 +170,8 @@ def iso_dates(verb, typ, ctx): This simply uses the `fromisoformat` and `isoformat` methods of `date` and `datetime`. - There is a loose variant in the examples that will accept a datetime in a date. A datetime always accepts both - dates and datetimes. + There is a loose variant in the examples that will accept a datetime in a date. A + datetime always accepts both dates and datetimes. """ if typ not in (date, datetime, time, timedelta): return diff --git a/json_syntax/string.py b/json_syntax/string.py index 379b012..031abdb 100644 --- a/json_syntax/string.py +++ b/json_syntax/string.py @@ -13,14 +13,15 @@ """ -As JSON requires string keys, unless dicts are only allowed to be Dict[str, T], we need to be able to encode -values as strings. +As JSON requires string keys, unless dicts are only allowed to be Dict[str, T], we need to +be able to encode values as strings. Recommendations: * The string verbs are not intended for direct use. * Use these verbs for any type that must be represented as a key in a JSON object. -* The standard rules will only handle types that are reliable keys and have obvious string encodings. +* The standard rules will only handle types that are reliable keys and have obvious string + encodings. See std.dicts for an example. """ diff --git a/json_syntax/types.py b/json_syntax/types.py new file mode 100644 index 0000000..63544af --- /dev/null +++ b/json_syntax/types.py @@ -0,0 +1,266 @@ +import collections as c +from importlib import import_module +import logging +import sys +import typing as t + +logger = logging.getLogger(__name__) +_eval_type = getattr(t, "_eval_type", None) +python_minor = sys.version_info[:2] +NoneType = type(None) + + +def has_origin(typ, origin, num_args=None): + """ + Determines if a concrete class (a generic class with arguments) matches an origin + and has a specified number of arguments. + + This does a direct match rather than a subclass check. + + The typing classes use dunder properties such that ``__origin__`` is the generic + class and ``__args__`` are the type arguments. + + Note: in python3.7, the ``__origin__`` attribute changed to reflect native types. + This call attempts to work around that so that 3.5 and 3.6 "just work." + """ + t_origin = get_origin(typ) + if not isinstance(origin, tuple): + origin = (origin,) + return t_origin in origin and (num_args is None or len(get_args(typ)) == num_args) + + +def get_origin(typ): + """ + Get the constructor origin of a generic type. For example, List is constructed with + list. + """ + try: + t_origin = typ.__origin__ + except AttributeError: + return _origin_pts(typ) + else: + return _origin_pts(t_origin or typ) + + +def get_args(typ): + return getattr(typ, "__args__", ()) + + +def get_generic_origin(typ): + """ + Get the generic origin of a fully parametrized generic type. + + E.g. get_generic_origin(typing.List[int]) == typing.List + """ + if not is_parametrized(typ): + return None + + origin = typ.__origin__ + if not is_generic(origin) and not hasattr(origin, "__parameters__"): + origin = _lookup_generic_origin(origin) + + return origin + + +def get_argument_map(typ): + """ + For a concrete type, e.g. List[int], find the type parameters that map to the arguments. 
+
+    This is mostly useful for custom generics, for example:
+
+        T = TypeVar('T')
+        U = TypeVar('U')
+
+        @attr.s
+        class MyGeneric(Generic[T, U]):
+            foo = attr.ib(type=T)
+            bar = attr.ib(type=List[U])
+
+        get_argument_map(MyGeneric[int, str]) == {T: int, U: str}
+    """
+    origin = get_generic_origin(typ)
+    if origin is None:
+        return {}
+    return dict(zip(origin.__parameters__, typ.__args__))
+
+
+def rewrite_typevars(typ, arg_map):
+    """
+    Rewrites a generic type according to a mapping of arguments.
+
+    Note: behavior is only defined for TypeVar objects.
+
+    From the example under get_argument_map:
+
+        rewrite_typevars(List[U], {T: int, U: str}) == List[str]
+
+    Note that rewrites should be applied immediately, while the argument map is at hand.
+    """
+    if not arg_map:
+        # Nothing to rewrite.
+        return typ
+
+    try:
+        # This is a type variable specified in the arguments.
+        return arg_map[typ]
+    except (KeyError, TypeError):
+        pass
+
+    origin = get_generic_origin(typ) or typ
+    try:
+        args = typ.__args__
+    except AttributeError:
+        return typ
+    else:
+        new_args = tuple(rewrite_typevars(arg, arg_map) for arg in args)
+        if new_args == args:
+            # Don't reconstruct the type when nothing changes.
+            return typ
+        else:
+            # Construct a new type with the rewritten arguments.
+            return origin[new_args]
+
+
+try:
+    _Generic = t.GenericMeta
+except AttributeError:
+    _Generic = t._GenericAlias
+
+
+def is_generic(typ):
+    """
+    Return true iff the instance (which should be a type value) is a generic type.
+
+    `typing` module notes:
+
+    3.4-3.6: typing.List[int] is an instance of typing.GenericMeta
+    3.7: typing.List[int] is an instance of typing._GenericAlias
+    """
+    return isinstance(typ, _Generic)
+
+
+def _make_map():
+    from collections import abc
+
+    seen = set()
+    for gen, con in [
+        (t.Tuple, tuple),
+        (t.List, list),
+        (t.Dict, dict),
+        (t.Callable, callable),
+        (t.Type, type),
+        (t.Set, set),
+        (t.FrozenSet, frozenset),
+    ]:
+        seen.add(gen)
+        yield gen, con
+
+    for name, generic in vars(t).items():
+        if not is_generic(generic) or generic in seen:
+            continue
+        for check in getattr(abc, name, None), getattr(c, name.lower(), None):
+            if check:
+                yield generic, check
+                break
+
+
+if python_minor < (3, 7):
+
+    def _origin_pts(typ, _pts=dict(_make_map())):
+        """
+        Convert the __origin__ of a generic type returned by the provisional typing API
+        (python3.4+) to the stable version.
+
+        Don't use this, just use get_origin.
+        """
+        return _pts.get(typ, typ)
+
+    def _lookup_generic_origin(typ):
+        """
+        Find the generic type corresponding to a regular type returned by .__origin__
+
+        Prefer using get_generic_origin to this.
+        """
+        return None
+
+    def is_parametrized(typ):
+        """
+        Determine if the type is both generic and fully realized; no free parameters.
+        Parameters *may* be specified by type vars.
+
+        This function works around weirdness in pre-3.7 where parameters will be set if
+        TypeVars are specified.
+        """
+        if not is_generic(typ):
+            return False
+        args = typ.__args__ or ()
+        return all(param in args for param in typ.__parameters__)
+
+
+else:
+
+    def _origin_pts(origin):
+        """
+        Convert the __origin__ of a generic type returned by the provisional typing API
+        (python3.4+) to the stable version.
+
+        Don't use this, just use get_origin.
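+
+        On 3.7 and later this is the identity function, e.g. ``_origin_pts(list) is list``.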
+ """ + return origin + + def _lookup_generic_origin( + typ, _stp={stable: prov for prov, stable in _make_map()} + ): + """ + Find the generic type corresponding to a regular type returned by .__origin__ + """ + return _stp.get(typ, None) + + def is_parametrized(typ): + """ + Determine if the type is both generic and fully realized; no free parameters. + Parameters *may* be specified by type vars. + """ + return is_generic(typ) and not typ.__parameters__ + + +def issub_safe(sub, sup): + """ + Safe version of issubclass that only compares regular types. + + Tries to be consistent in handling generic types. + + `typing` module notes: + + 3.5, 3.6: issubclass(t.List[int], list) returns true + 3.7: issubclass(t.List[int], list) raises a TypeError + """ + try: + return not is_generic(sub) and issubclass(sub, sup) + except TypeError: + return False + + +def resolve_fwd_ref(typ, context_class): + """ + Tries to resolve a forward reference given a containing class. This does nothing for + regular types. + """ + resolved = None + try: + namespace = vars(import_module(context_class.__module__)) + except AttributeError: + logger.warning("Couldn't determine module of %r", context_class) + else: + resolved = _eval_type(typ, namespace, {}) + if resolved is None: + return typ + else: + return resolved + + +if _eval_type is None: + # If typing's internal API changes, we have tests that break. + def resolve_fwd_ref(typ, context_class): # noqa + return typ + + +del _make_map diff --git a/poetry.lock b/poetry.lock index 0dd1951..3e8df7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,12 +1,3 @@ -[[package]] -category = "dev" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -marker = "python_version >= \"3.7\"" -name = "appdirs" -optional = false -python-versions = "*" -version = "1.4.3" - [[package]] category = "dev" description = "Atomic file writes." @@ -20,41 +11,32 @@ category = "dev" description = "Classes Without Boilerplate" name = "attrs" optional = false -python-versions = "*" -version = "18.2.0" - -[[package]] -category = "dev" -description = "The uncompromising code formatter." -marker = "python_version >= \"3.7\"" -name = "black" -optional = false -python-versions = ">=3.6" -version = "19.3b0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.3.0" -[package.dependencies] -appdirs = "*" -attrs = ">=18.1.0" -click = ">=6.5" -toml = ">=0.9.4" +[package.extras] +azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] +dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] +docs = ["sphinx", "zope.interface"] +tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] [[package]] category = "dev" -description = "Composable command line interface toolkit" -marker = "python_version >= \"3.7\"" -name = "click" +description = "Cross-platform colored terminal text." +marker = "sys_platform == \"win32\" and python_version == \"3.4\"" +name = "colorama" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "7.0" +version = "0.4.1" [[package]] category = "dev" description = "Cross-platform colored terminal text." 
-marker = "sys_platform == \"win32\"" +marker = "sys_platform == \"win32\" and python_version != \"3.4\"" name = "colorama" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.4.1" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.4.3" [[package]] category = "dev" @@ -64,29 +46,103 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" version = "4.5.4" +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "5.0.2" + +[package.extras] +toml = ["toml"] + +[[package]] +category = "dev" +description = "An implementation of PEP 557: Data Classes" +marker = "python_version >= \"3.6\" and python_version < \"3.7\"" +name = "dataclasses" +optional = false +python-versions = "*" +version = "0.4" + [[package]] category = "dev" description = "A library for property based testing" name = "hypothesis" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "4.38.1" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.13.0" [package.dependencies] attrs = ">=16.0.0" +[package.extras] +all = ["django (>=1.11)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "numpy (>=1.9.0)", "pandas (>=0.19)", "pytest (>=3.0)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "pytz"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=1.11)", "pytz"] +dpcontracts = ["dpcontracts (>=0.4)"] +lark = ["lark-parser (>=0.6.5)"] +numpy = ["numpy (>=1.9.0)"] +pandas = ["pandas (>=0.19)"] +pytest = ["pytest (>=3.0)"] +pytz = ["pytz (>=2014.1)"] + +[[package]] +category = "dev" +description = "A library for property-based testing" +name = "hypothesis" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "4.57.1" + +[package.dependencies] +attrs = ">=19.2.0" +sortedcontainers = ">=2.1.0,<3.0.0" + +[package.extras] +all = ["django (>=1.11)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "numpy (>=1.9.0)", "pandas (>=0.19)", "pytest (>=4.3)", "python-dateutil (>=1.4)", "pytz (>=2014.1)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["pytz (>=2014.1)", "django (>=1.11)"] +dpcontracts = ["dpcontracts (>=0.4)"] +lark = ["lark-parser (>=0.6.5)"] +numpy = ["numpy (>=1.9.0)"] +pandas = ["pandas (>=0.19)"] +pytest = ["pytest (>=4.3)"] +pytz = ["pytz (>=2014.1)"] + +[[package]] +category = "dev" +description = "Read metadata from Python packages" +marker = "python_version < \"3.8\"" +name = "importlib-metadata" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +version = "1.1.3" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "rst.linker"] +testing = ["packaging", "importlib-resources"] + [[package]] category = "dev" description = "Read metadata from Python packages" marker = "python_version < \"3.8\"" name = "importlib-metadata" optional = false -python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" -version = "0.23" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "1.3.0" [package.dependencies] zipp = ">=0.5" +[package.extras] +docs = ["sphinx", "rst.linker"] +testing = ["packaging", "importlib-resources"] + [[package]] category = "dev" description = "More routines for operating on iterables, beyond itertools" @@ -96,13 +152,22 @@ optional = false python-versions = ">=3.4" 
version = "7.2.0" +[[package]] +category = "dev" +description = "More routines for operating on iterables, beyond itertools" +marker = "python_version < \"3.8\" or python_version > \"2.7\"" +name = "more-itertools" +optional = false +python-versions = ">=3.5" +version = "8.0.2" + [[package]] category = "dev" description = "Core utilities for Python packages" name = "packaging" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "19.2" +version = "20.0" [package.dependencies] pyparsing = ">=2.0.2" @@ -120,26 +185,33 @@ version = "2.3.5" [package.dependencies] six = "*" +[package.dependencies.scandir] +python = "<3.5" +version = "*" + [[package]] category = "dev" description = "plugin and hook calling mechanisms for python" name = "pluggy" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.0" +version = "0.13.1" [package.dependencies] [package.dependencies.importlib-metadata] python = "<3.8" version = ">=0.12" +[package.extras] +dev = ["pre-commit", "tox"] + [[package]] category = "dev" description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.8.0" +version = "1.8.1" [[package]] category = "dev" @@ -147,7 +219,7 @@ description = "Python parsing module" name = "pyparsing" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.2" +version = "2.4.6" [[package]] category = "dev" @@ -155,18 +227,25 @@ description = "pytest: simple powerful testing with Python" name = "pytest" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "4.6.6" +version = "4.6.9" [package.dependencies] atomicwrites = ">=1.0" attrs = ">=17.4.0" -colorama = "*" packaging = "*" pluggy = ">=0.12,<1.0" py = ">=1.5.0" six = ">=1.10.0" wcwidth = "*" +[[package.dependencies.colorama]] +python = "<3.4.0 || >=3.5.0" +version = "*" + +[[package.dependencies.colorama]] +python = ">=3.4,<3.5" +version = "<=0.4.1" + [package.dependencies.importlib-metadata] python = "<3.8" version = ">=0.12" @@ -179,6 +258,9 @@ version = ">=4.0.0" python = "<3.6" version = ">=2.2.0" +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "nose", "requests", "mock"] + [[package]] category = "dev" description = "Pytest plugin for measuring coverage." 
@@ -191,34 +273,68 @@ version = "2.8.1" coverage = ">=4.4" pytest = ">=3.6" +[package.extras] +testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "virtualenv"] + [[package]] category = "main" description = "Extensions to the standard Python datetime module" marker = "python_version < \"3.7\"" name = "python-dateutil" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.8.0" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +version = "2.8.1" [package.dependencies] six = ">=1.5" +[[package]] +category = "dev" +description = "scandir, a better directory iterator and faster os.walk()" +marker = "python_version < \"3.5\"" +name = "scandir" +optional = false +python-versions = "*" +version = "1.10.0" + [[package]] category = "main" description = "Python 2 and 3 compatibility utilities" name = "six" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "1.12.0" +version = "1.13.0" + +[[package]] +category = "dev" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +name = "sortedcontainers" +optional = false +python-versions = "*" +version = "2.1.0" + +[[package]] +category = "main" +description = "Type Hints for Python" +marker = "python_version < \"3.5\"" +name = "typing" +optional = false +python-versions = "*" +version = "3.7.4.1" [[package]] category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" -marker = "python_version >= \"3.7\"" -name = "toml" +description = "Backported and Experimental Type Hints for Python 3.5+" +marker = "python_version < \"3.8\"" +name = "typing-extensions" optional = false python-versions = "*" -version = "0.10.0" +version = "3.7.4.1" + +[package.dependencies] +[package.dependencies.typing] +python = "<3.5" +version = ">=3.7.4" [[package]] category = "dev" @@ -226,7 +342,7 @@ description = "Measures number of Terminal column cells of wide-character codes" name = "wcwidth" optional = false python-versions = "*" -version = "0.1.7" +version = "0.1.8" [[package]] category = "dev" @@ -240,30 +356,185 @@ version = "0.6.0" [package.dependencies] more-itertools = "*" +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] +testing = ["pathlib2", "contextlib2", "unittest2"] + [metadata] -content-hash = "30ade8462629ad243bf4029ae6c7d9fe784af2117aa85c903aadc3b6f6e2d4f9" -python-versions = "^3.5" - -[metadata.hashes] -appdirs = ["9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", "d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"] -atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] -attrs = ["10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", "ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"] -black = ["09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf", "68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c"] -click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] -colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] -coverage = ["08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", "0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", "141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", 
"19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", "23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", "245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", "331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", "386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", "3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", "60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", "63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", "6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", "6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", "7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", "826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", "93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", "9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", "af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", "bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", "bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", "c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", "dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", "df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", "e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", "e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", "e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", "eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", "eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", "ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", "efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", "fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", "ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025"] -hypothesis = ["2d0f5c69d939da4c2d6e1e686623c26a91087f3fc9077cd865013f532583dc05", "d66286a7a67946d843b37c5656321ea3fd318c259dc0ed9aeb6f2d4b037b876a"] -importlib-metadata = ["aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", "d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"] -more-itertools = ["409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", "92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"] -packaging = ["28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", "d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"] -pathlib2 = ["0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"] -pluggy = ["0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", "fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"] -py = ["64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"] -pyparsing = ["6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", "d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"] -pytest = ["5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592"] -pytest-cov = ["cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", "cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"] -python-dateutil = 
["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] -six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] -toml = ["229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", "235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e", "f1db651f9657708513243e61e6cc67d101a39bad662eaa9b5546f789338e07a3"] -wcwidth = ["3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", "f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"] -zipp = ["3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", "f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"] +content-hash = "3443d2eb40682c0356661b3bb3db19a29bc8100c6d3f43f467dfc230cd9defb0" +python-versions = "^3.4" + +[metadata.files] +atomicwrites = [ + {file = "atomicwrites-1.3.0-py2.py3-none-any.whl", hash = "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4"}, + {file = "atomicwrites-1.3.0.tar.gz", hash = "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"}, +] +attrs = [ + {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, + {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, +] +colorama = [ + {file = "colorama-0.4.1-py2.py3-none-any.whl", hash = "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"}, + {file = "colorama-0.4.1.tar.gz", hash = "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d"}, + {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, + {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, +] +coverage = [ + {file = "coverage-4.5.4-cp26-cp26m-macosx_10_12_x86_64.whl", hash = "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28"}, + {file = "coverage-4.5.4-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c"}, + {file = "coverage-4.5.4-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce"}, + {file = "coverage-4.5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe"}, + {file = "coverage-4.5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888"}, + {file = "coverage-4.5.4-cp27-cp27m-win32.whl", hash = "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc"}, + {file = "coverage-4.5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24"}, + {file = "coverage-4.5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437"}, + {file = "coverage-4.5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6"}, + {file = "coverage-4.5.4-cp33-cp33m-macosx_10_10_x86_64.whl", hash = "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5"}, + {file = "coverage-4.5.4-cp34-cp34m-macosx_10_12_x86_64.whl", hash = 
"sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef"}, + {file = "coverage-4.5.4-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e"}, + {file = "coverage-4.5.4-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca"}, + {file = "coverage-4.5.4-cp34-cp34m-win32.whl", hash = "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0"}, + {file = "coverage-4.5.4-cp34-cp34m-win_amd64.whl", hash = "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1"}, + {file = "coverage-4.5.4-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7"}, + {file = "coverage-4.5.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47"}, + {file = "coverage-4.5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025"}, + {file = "coverage-4.5.4-cp35-cp35m-win32.whl", hash = "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e"}, + {file = "coverage-4.5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d"}, + {file = "coverage-4.5.4-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9"}, + {file = "coverage-4.5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755"}, + {file = "coverage-4.5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9"}, + {file = "coverage-4.5.4-cp36-cp36m-win32.whl", hash = "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f"}, + {file = "coverage-4.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5"}, + {file = "coverage-4.5.4-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca"}, + {file = "coverage-4.5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650"}, + {file = "coverage-4.5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2"}, + {file = "coverage-4.5.4-cp37-cp37m-win32.whl", hash = "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5"}, + {file = "coverage-4.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351"}, + {file = "coverage-4.5.4-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5"}, + {file = "coverage-4.5.4.tar.gz", hash = "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c"}, + {file = "coverage-5.0.2-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:511ec0c00840e12fb4e852e4db58fa6a01ca4da72f36a9766fae344c3d502033"}, + {file = "coverage-5.0.2-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:d22b4297e7e4225ccf01f1aa55e7a96412ea0796b532dd614c3fcbafa341128e"}, + {file = "coverage-5.0.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:593853aa1ac6dcc6405324d877544c596c9d948ef20d2e9512a0f5d2d3202356"}, + {file = "coverage-5.0.2-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:e65a5aa1670db6263f19fdc03daee1d7dbbadb5cb67fd0a1f16033659db13c1d"}, + {file = "coverage-5.0.2-cp27-cp27m-win32.whl", hash = "sha256:d4a2b578a7a70e0c71f662705262f87a456f1e6c1e40ada7ea699abaf070a76d"}, + {file = "coverage-5.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:28f7f73b34a05e23758e860a89a7f649b85c6749e252eff60ebb05532d180e86"}, + {file = "coverage-5.0.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7d1cc7acc9ce55179616cf72154f9e648136ea55987edf84addbcd9886ffeba2"}, + {file = "coverage-5.0.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:2d0cb9b1fe6ad0d915d45ad3d87f03a38e979093a98597e755930db1f897afae"}, + {file = "coverage-5.0.2-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:bfe102659e2ec13b86c7f3b1db6c9a4e7beea4255058d006351339e6b342d5d2"}, + {file = "coverage-5.0.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:23688ff75adfa8bfa2a67254d889f9bdf9302c27241d746e17547c42c732d3f4"}, + {file = "coverage-5.0.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1bf7ba2af1d373a1750888724f84cffdfc697738f29a353c98195f98fc011509"}, + {file = "coverage-5.0.2-cp35-cp35m-win32.whl", hash = "sha256:569f9ee3025682afda6e9b0f5bb14897c0db03f1a1dc088b083dd36e743f92bb"}, + {file = "coverage-5.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:cf908840896f7aa62d0ec693beb53264b154f972eb8226fb864ac38975590c4f"}, + {file = "coverage-5.0.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:eaad65bd20955131bcdb3967a4dea66b4e4d4ca488efed7c00d91ee0173387e8"}, + {file = "coverage-5.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:225e79a5d485bc1642cb7ba02281419c633c216cdc6b26c26494ba959f09e69f"}, + {file = "coverage-5.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bd82b684bb498c60ef47bb1541a50e6d006dde8579934dcbdbc61d67d1ea70d9"}, + {file = "coverage-5.0.2-cp36-cp36m-win32.whl", hash = "sha256:7ca3db38a61f3655a2613ee2c190d63639215a7a736d3c64cc7bbdb002ce6310"}, + {file = "coverage-5.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:47874b4711c5aeb295c31b228a758ce3d096be83dc37bd56da48ed99efb8813b"}, + {file = "coverage-5.0.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:955ec084f549128fa2702f0b2dc696392001d986b71acd8fd47424f28289a9c3"}, + {file = "coverage-5.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1f4ee8e2e4243971618bc16fcc4478317405205f135e95226c2496e2a3b8dbbf"}, + {file = "coverage-5.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f45fba420b94165c17896861bb0e8b27fb7abdcedfeb154895d8553df90b7b00"}, + {file = "coverage-5.0.2-cp37-cp37m-win32.whl", hash = "sha256:cca38ded59105f7705ef6ffe1e960b8db6c7d8279c1e71654a4775ab4454ca15"}, + {file = "coverage-5.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:cb2b74c123f65e8166f7e1265829a6c8ed755c3cd16d7f50e75a83456a5f3fd7"}, + {file = "coverage-5.0.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:53e7438fef0c97bc248f88ba1edd10268cd94d5609970aaf87abbe493691af87"}, + {file = "coverage-5.0.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1e4e39e43057396a5e9d069bfbb6ffeee892e40c5d2effbd8cd71f34ee66c4d"}, + {file = "coverage-5.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b0a07158360d22492f9abd02a0f2ee7981b33f0646bf796598b7673f6bbab14"}, + {file = "coverage-5.0.2-cp38-cp38m-win32.whl", hash = "sha256:88b51153657612aea68fa684a5b88037597925260392b7bb4509d4f9b0bdd889"}, + {file = "coverage-5.0.2-cp38-cp38m-win_amd64.whl", hash = "sha256:189aac76d6e0d7af15572c51892e7326ee451c076c5a50a9d266406cd6c49708"}, + {file = "coverage-5.0.2-cp39-cp39m-win32.whl", hash = 
"sha256:d095a7b473f8a95f7efe821f92058c8a2ecfb18f8db6677ae3819e15dc11aaae"}, + {file = "coverage-5.0.2-cp39-cp39m-win_amd64.whl", hash = "sha256:ddeb42a3d5419434742bf4cc71c9eaa22df3b76808e23a82bd0b0bd360f1a9f1"}, + {file = "coverage-5.0.2.tar.gz", hash = "sha256:b251c7092cbb6d789d62dc9c9e7c4fb448c9138b51285c36aeb72462cad3600e"}, +] +dataclasses = [ + {file = "dataclasses-0.4-py3-none-any.whl", hash = "sha256:068953b730c80cbb13ca6aac6ceedaa5d483fb6081a372ac4788aa5179ed9597"}, + {file = "dataclasses-0.4.tar.gz", hash = "sha256:0f75133f21f6c9bd0fe82bc75d9908e46f531682911c9cffa75bce0e40ef09ef"}, +] +hypothesis = [ + {file = "hypothesis-4.13.0-py2-none-any.whl", hash = "sha256:035d95b47651490b138fa45b3239894a60a122ab7e66b93a355b20342506cdcf"}, + {file = "hypothesis-4.13.0-py3-none-any.whl", hash = "sha256:83a5c2cae72b2b5cce13ca4e0df6cc9cc2d057b502e4c111678dae69674471dd"}, + {file = "hypothesis-4.13.0.tar.gz", hash = "sha256:1896d88507c63e55036d7ea4ca73281e1c001167f8c4f92f788dbac7bb811832"}, + {file = "hypothesis-4.57.1-py3-none-any.whl", hash = "sha256:94f0910bc87e0ae8c098f4ada28dfdc381245e0c8079c674292b417dbde144b5"}, + {file = "hypothesis-4.57.1.tar.gz", hash = "sha256:3c4369a4b0a1348561048bcda5f1db951a1b8e2a514ea8e8c70d36e656bf6fa0"}, +] +importlib-metadata = [ + {file = "importlib_metadata-1.1.3-py2.py3-none-any.whl", hash = "sha256:7c7f8ac40673f507f349bef2eed21a0e5f01ddf5b2a7356a6c65eb2099b53764"}, + {file = "importlib_metadata-1.1.3.tar.gz", hash = "sha256:7a99fb4084ffe6dae374961ba7a6521b79c1d07c658ab3a28aa264ee1d1b14e3"}, + {file = "importlib_metadata-1.3.0-py2.py3-none-any.whl", hash = "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"}, + {file = "importlib_metadata-1.3.0.tar.gz", hash = "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45"}, +] +more-itertools = [ + {file = "more-itertools-7.2.0.tar.gz", hash = "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832"}, + {file = "more_itertools-7.2.0-py3-none-any.whl", hash = "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"}, + {file = "more-itertools-8.0.2.tar.gz", hash = "sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d"}, + {file = "more_itertools-8.0.2-py3-none-any.whl", hash = "sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564"}, +] +packaging = [ + {file = "packaging-20.0-py2.py3-none-any.whl", hash = "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb"}, + {file = "packaging-20.0.tar.gz", hash = "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8"}, +] +pathlib2 = [ + {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"}, + {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +py = [ + {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, + {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"}, +] +pyparsing = [ + {file = "pyparsing-2.4.6-py2.py3-none-any.whl", hash = 
"sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"}, + {file = "pyparsing-2.4.6.tar.gz", hash = "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f"}, +] +pytest = [ + {file = "pytest-4.6.9-py2.py3-none-any.whl", hash = "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324"}, + {file = "pytest-4.6.9.tar.gz", hash = "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339"}, +] +pytest-cov = [ + {file = "pytest-cov-2.8.1.tar.gz", hash = "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b"}, + {file = "pytest_cov-2.8.1-py2.py3-none-any.whl", hash = "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +scandir = [ + {file = "scandir-1.10.0-cp27-cp27m-win32.whl", hash = "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188"}, + {file = "scandir-1.10.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"}, + {file = "scandir-1.10.0-cp34-cp34m-win32.whl", hash = "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f"}, + {file = "scandir-1.10.0-cp34-cp34m-win_amd64.whl", hash = "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e"}, + {file = "scandir-1.10.0-cp35-cp35m-win32.whl", hash = "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f"}, + {file = "scandir-1.10.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32"}, + {file = "scandir-1.10.0-cp36-cp36m-win32.whl", hash = "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022"}, + {file = "scandir-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4"}, + {file = "scandir-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173"}, + {file = "scandir-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d"}, + {file = "scandir-1.10.0.tar.gz", hash = "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae"}, +] +six = [ + {file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"}, + {file = "six-1.13.0.tar.gz", hash = "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"}, +] +sortedcontainers = [ + {file = "sortedcontainers-2.1.0-py2.py3-none-any.whl", hash = "sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"}, + {file = "sortedcontainers-2.1.0.tar.gz", hash = "sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a"}, +] +typing = [ + {file = "typing-3.7.4.1-py2-none-any.whl", hash = "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36"}, + {file = "typing-3.7.4.1-py3-none-any.whl", hash = "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"}, + {file = "typing-3.7.4.1.tar.gz", hash = "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23"}, +] +typing-extensions = [ + {file = "typing_extensions-3.7.4.1-py2-none-any.whl", hash = 
"sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d"}, + {file = "typing_extensions-3.7.4.1-py3-none-any.whl", hash = "sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575"}, + {file = "typing_extensions-3.7.4.1.tar.gz", hash = "sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2"}, +] +wcwidth = [ + {file = "wcwidth-0.1.8-py2.py3-none-any.whl", hash = "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603"}, + {file = "wcwidth-0.1.8.tar.gz", hash = "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"}, +] +zipp = [ + {file = "zipp-0.6.0-py2.py3-none-any.whl", hash = "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"}, + {file = "zipp-0.6.0.tar.gz", hash = "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e"}, +] diff --git a/pyproject.toml b/pyproject.toml index eac9577..2bab559 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "json-syntax" -version = "2.1.1" +version = "2.2.0" description = "Generates functions to convert Python classes to JSON dumpable objects." authors = ["Ben Samuel "] license = "MIT" @@ -13,34 +13,67 @@ classifiers = [ ] [tool.poetry.dependencies] -python = "^3.5" +python = "^3.4" python-dateutil = {version="^2.7", python = "<3.7"} +typing = {version = "^3.7", python = "<3.5"} [tool.poetry.dev-dependencies] pytest = "^4.1" -attrs = "^18.2" +attrs = "^19.2" pytest-cov = "^2.6" -black = {version = "^19.3-beta.0",allows-prereleases = true,python = ">=3.7"} -hypothesis = "^4.14" +hypothesis = "^4" +dataclasses = {version="0.4", python = ">=3.6,<3.7"} +typing-extensions = {version = "^3.7", python = "<3.8"} +# Install these tools via pipx. +# black = {version = "^19.3-beta.0", allow-prereleases = true, python = ">=3.7"} +# dephell = {version = '^0.8.0', python = '>=3.6'} [tool.black] line-length = 88 -target-version = ["py35"] - -[tool.tox] -legacy_tox_ini = """ -[tox] -isolated_builds = true -envlist = py35, py36, py37, py38 -skipsdist = true - -[testenv] -deps = - poetry -commands = - poetry install - poetry run pytest {posargs} -""" +target-version = ["py34"] + [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry>=1.0"] build-backend = "poetry.masonry.api" + +[tool.dephell.setup] +# dephell deps convert -e setup; then pip install -e path/to/json_syntax will work +from = {format = "poetry", path = "pyproject.toml"} +to = {format = 'setuppy', path = "setup.py"} + +[tool.dephell.req] +# Use `dephell deps convert -e req` to generate requirements.txt. 
+from = {format = "poetry", path = "pyproject.toml"} +to = {format = 'pip', path = "requirements.txt"} + +[tool.dephell.test34.docker] +# dephell docker run -e test34 pip install -r requirements.txt +# dephell docker run -e test34 pytest tests/ +container = "test34" +repo = "python" +tag = "3.4" + +[tool.dephell.test35.docker] +container = "test35" +repo = "python" +tag = "3.5" + +[tool.dephell.test36.docker] +container = "test36" +repo = "python" +tag = "3.6" + +[tool.dephell.test37.docker] +container = "test37" +repo = "python" +tag = "3.7" + +[tool.dephell.test38.docker] +container = "test38" +repo = "python" +tag = "3.8" + +[tool.dephell.test39.docker] +container = "test39" +repo = "python" +tag = "3.9-rc" diff --git a/setup.cfg b/setup.cfg index 718e59c..6e3b0fe 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,5 +12,5 @@ exclude = __pycache__ .git htmlcov -max-line-length = 120 +max-line-length = 92 disable-noqa = False diff --git a/tests/_strategies.py b/tests/_strategies.py index 743dc4e..ec019ee 100644 --- a/tests/_strategies.py +++ b/tests/_strategies.py @@ -73,7 +73,7 @@ def _attrib(typ): "attrs_" + name, {field: _attrib(typ) for field, typ in fields}, frozen=frozen, - **kw, + **kw ) return st.builds(_make, idents, fields_idents(types), slots=st.booleans()) diff --git a/tests/common.py b/tests/common.py index 1aa340f..6be6044 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,3 +1,6 @@ +from importlib import import_module + + class Rules: def __init__(self, *rules): self.rules = rules @@ -11,3 +14,27 @@ def lookup(self, verb, typ, accept_missing=False): return None else: raise RuntimeError("No rule for verb={}, typ={}".format(verb, typ)) + + +class SoftMod: + def __init__(self, *modules, allow_SyntaxError=False): + self.mods = mods = [] + for name in modules: + try: + mods.append(import_module(name)) + except ImportError: + pass + except SyntaxError: + if not allow_SyntaxError: + raise + + def __getattr__(self, name): + for mod in self.mods: + val = getattr(mod, name, None) + if val is not None: + return val + return None + + +typing = SoftMod("typing", "typing_extensions") +dataclasses = SoftMod("dataclasses") diff --git a/tests/extras/test_loose_dates.py b/tests/extras/test_loose_dates.py index b48876c..378501a 100644 --- a/tests/extras/test_loose_dates.py +++ b/tests/extras/test_loose_dates.py @@ -11,7 +11,7 @@ python_minor < (3, 7), reason="datetime.isoformat not supported before python 3.7" ) def test_iso_dates_loose(): - "Test the iso_dates_loose handles dates using ISO8601, accepting datetimes as input to dates." + "Test the iso_dates_loose handles dates using ISO8601 and accepts datetimes." 
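+    # "Loose" means a full ISO8601 datetime is accepted where a date is expected;
+    # the strict iso_dates rule rejects such input.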
decoder = exam.iso_dates_loose(verb=JSON2PY, typ=date, ctx=Rules()) assert decoder("1776-07-04") == date(1776, 7, 4) diff --git a/tests/test_attrs.py b/tests/test_attrs.py index 2a7c784..d687968 100644 --- a/tests/test_attrs.py +++ b/tests/test_attrs.py @@ -1,34 +1,55 @@ import pytest +from .common import SoftMod, typing as t, Rules +from .types_attrs_common import Hooks, T, U + from json_syntax import attrs as at +from json_syntax import std from json_syntax.helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON -try: - from dataclasses import dataclass -except ImportError: - dataclass = lambda cls: None # noqa -from typing import Tuple +import attr +from collections import namedtuple +from typing import Tuple, Generic, List + +ann = SoftMod("tests.types_attrs_ann", allow_SyntaxError=True) + + +@attr.s +class Flat: + a = attr.ib(type=int) + b = attr.ib("default", type=str) + + +@attr.s +class GenFlat(Generic[T]): + a = attr.ib(type=T) + b = attr.ib("default", type=str) + + +@attr.s +class Hook1(Hooks): + a = attr.ib(type=int) + b = attr.ib("default", type=str) + + +@attr.s +class GenExample(Generic[T, U]): + body = attr.ib(type=T) + count = attr.ib(type=int) + messages = attr.ib(type=t.List[U]) + try: - from tests.types_attrs_ann import ( - flat_types, - hook_types, - Named1, - Named2, - Named3, - Dict1, - Dict2, - ) -except SyntaxError: - from tests.types_attrs_noann import ( - flat_types, - hook_types, - Named1, - Named2, - Named3, - Dict1, - Dict2, - ) + + @attr.s(slots=True) + class GenExampleSlots(Generic[T, U]): + body = attr.ib(type=T) + count = attr.ib(type=int) + messages = attr.ib(type=t.List[U]) + + +except TypeError: + GenExampleSlots = None class Fail: @@ -53,37 +74,51 @@ def test_attrs_classes_disregards(): assert at.attrs_classes(verb=PY2JSON, typ=int, ctx=Fail()) is None assert at.attrs_classes(verb=INSP_PY, typ=int, ctx=Fail()) is None assert at.attrs_classes(verb=JSON2PY, typ=object, ctx=Fail()) is None - assert at.attrs_classes(verb="dummy", typ=flat_types[0], ctx=Fail()) is None + assert at.attrs_classes(verb="dummy", typ=Flat, ctx=Fail()) is None -@pytest.mark.parametrize("FlatCls", flat_types) -def test_attrs_encoding(FlatCls): +@pytest.mark.parametrize( + "con, FlatCls", + [ + (Flat, Flat), + (ann.Flat, ann.Flat), + (GenFlat, GenFlat[int]), + (ann.GenFlat, ann.GenFlat[int]) if ann.GenFlat else (None, None), + (ann.FlatDc, ann.FlatDc), + (ann.GenFlatDc, ann.GenFlatDc[int]) if ann.GenFlatDc else (None, None), + ], +) +def test_attrs_encoding(con, FlatCls): "Test that attrs_classes encodes and decodes a flat class." 
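+    # FlatCls is the type under test and may be a parameterized alias such as
+    # GenFlat[int]; con is the plain class used to construct expected instances.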
+ if FlatCls is None: + pytest.skip("Annotations unavailable") encoder = at.attrs_classes(verb=PY2JSON, typ=FlatCls, ctx=Ctx()) - assert encoder(FlatCls(33, "foo")) == {"a": 33, "b": "foo"} - assert encoder(FlatCls(33, "default")) == {"a": 33} + assert encoder(con(33, "foo")) == {"a": 33, "b": "foo"} + assert encoder(con(33, "default")) == {"a": 33} decoder = at.attrs_classes(verb=JSON2PY, typ=FlatCls, ctx=Ctx()) assert decoder({"a": 33, "b": "foo"}) == FlatCls(33, "foo") assert decoder({"a": 33}) == FlatCls(33) inspect = at.attrs_classes(verb=INSP_PY, typ=FlatCls, ctx=Ctx()) - assert inspect(FlatCls(33, "foo")) - assert inspect(FlatCls("str", "foo")) + assert inspect(con(33, "foo")) + assert inspect(con("str", "foo")) assert not inspect({"a": 33, "b": "foo"}) inspect = at.attrs_classes(verb=INSP_JSON, typ=FlatCls, ctx=Ctx()) - assert not inspect(FlatCls(33, "foo")) + assert not inspect(con(33, "foo")) assert not inspect({"a": "str", "b": "foo"}) assert inspect({"a": 33}) assert inspect({"a": 33, "b": "foo"}) assert not inspect({"b": "foo"}) -@pytest.mark.parametrize("HookCls", hook_types) +@pytest.mark.parametrize("HookCls", [Hook1, ann.Hook]) def test_attrs_hooks(HookCls): "Test that attrs_classes enables hooks." + if HookCls is None: + pytest.skip("Annotations unavailable") encoder = at.attrs_classes(verb=PY2JSON, typ=HookCls, ctx=Ctx()) assert encoder(HookCls(33, "foo")) == {"_type_": "Hook", "a": 33, "b": "foo"} @@ -106,6 +141,60 @@ def test_attrs_hooks(HookCls): assert inspect({"_type_": "Hook"}) +@pytest.mark.parametrize( + "GenClass", + [ + GenExample, + GenExampleSlots, + ann.GenExample, + ann.GenExampleSlots, + ann.GenExampleDc, + ], +) +def test_attrs_generic(GenClass): + if GenClass is None: + pytest.skip() + + @attr.s + class Top: + nested = attr.ib(type=GenClass[GenClass[str, str], str]) + list_of = attr.ib(type=List[GenClass[Tuple[Flat, ...], int]]) + + rules = Rules(at.attrs_classes, std.atoms, std.lists) + py_val = Top( + nested=GenClass( + body=GenClass(body="body", count=5, messages=["msg1", "msg2"]), + count=3, + messages=["msg3", "msg4"], + ), + list_of=[ + GenClass(body=(Flat(a=1), Flat(a=2, b="three")), count=4, messages=[6, 7]) + ], + ) + j_val = { + "list_of": [ + {"body": [{"a": 1}, {"a": 2, "b": "three"}], "count": 4, "messages": [6, 7]} + ], + "nested": { + "body": {"body": "body", "count": 5, "messages": ["msg1", "msg2"]}, + "count": 3, + "messages": ["msg3", "msg4"], + }, + } + + encoder = at.attrs_classes(verb=PY2JSON, typ=Top, ctx=rules) + assert encoder(py_val) == j_val + + decoder = at.attrs_classes(verb=JSON2PY, typ=Top, ctx=rules) + assert decoder(j_val) == py_val + + inspect = at.attrs_classes(verb=INSP_PY, typ=Top, ctx=rules) + assert inspect(py_val) + + inspect = at.attrs_classes(verb=INSP_JSON, typ=Top, ctx=rules) + assert inspect(j_val) + + class Ctx2: def lookup(self, *, verb, typ, accept_missing=False): if typ is None: @@ -130,6 +219,14 @@ def test_named_tuples_disregards(): assert at.named_tuples(verb="dummy", typ=Named1, ctx=Fail()) is None +Named1 = namedtuple("Named1", ["a", "b"]) +try: + Named2 = namedtuple("Named2", ["a", "b"], defaults=["default"]) +except TypeError: + Named2 = None +Named3 = ann.Named + + def test_named_tuples_encoding1(): "Test that named_tuples encodes and decodes a namedtuple." 
@@ -243,13 +340,12 @@ def test_tuples_encoding(): @pytest.mark.parametrize( - "dict_type,reason", - [(Dict1, "TypedDict unavailable"), (Dict2, "TypedDict or annotations unavailable")], + "dict_type", [t.TypedDict("Dict1", a=int, b=str) if t.TypedDict else None, ann.Dict] ) -def test_typed_dict_encoding(dict_type, reason): +def test_typed_dict_encoding(dict_type): "Test that typed_dicts encodes and decodes a typed dict." if dict_type is None: - pytest.skip(reason) + pytest.skip("TypedDict or annotations unavailable") encoder = at.typed_dicts(verb=PY2JSON, typ=dict_type, ctx=Ctx()) assert encoder({"a": 3, "b": "foo"}) == {"a": 3, "b": "foo"} diff --git a/tests/test_cache.py b/tests/test_cache.py index 9d1bac3..f8618cd 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -34,7 +34,7 @@ def test_simple_cache_get(): @pytest.mark.filterwarnings("error") def test_simple_cache_flight(): - "Test that the SimpleCache inflight -> complete mechanism produces a valid forward action." + "Test that the SimpleCache inflight -> complete mechanism produces a forward action." subj = cache.SimpleCache() diff --git a/tests/test_errors.py b/tests/test_errors.py new file mode 100644 index 0000000..9716014 --- /dev/null +++ b/tests/test_errors.py @@ -0,0 +1,44 @@ +import pytest + +from json_syntax import errors as err + +import traceback as tb + + +@pytest.mark.parametrize( + "args,expect", + [ + ((), "ValueError: At .alpha.beta\n"), + (("message",), "ValueError: message; at .alpha.beta\n"), + (("two", "parts"), "ValueError: ('two; at .alpha.beta', 'parts')\n"), + ], +) +def test_error_context(args, expect): + "Test that error contexts add information correctly." + + try: + with err.ErrorContext(".", "alpha"): + with err.ErrorContext(".", "beta"): + raise ValueError(*args) + except ValueError as exc: + actual = "".join(tb.format_exception_only(type(exc), exc)) + else: + assert False, "Didn't throw?!" + + assert actual == expect + + +def test_error_ctx_inline(): + "Test that err_ctx adds inline context." + + def inside(): + raise ValueError("message") + + try: + err.err_ctx(".alpha", inside) + except ValueError as exc: + actual = "".join(tb.format_exception_only(type(exc), exc)) + else: + assert False, "Didn't throw?!" + + assert actual == "ValueError: message; at .alpha\n" diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 8c72b45..d8ed553 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,8 +1,5 @@ from json_syntax import helpers as hlp -import traceback as tb -import typing as t - def test_identity(): "Test that the identity function does what it says." @@ -10,99 +7,3 @@ def test_identity(): subj = object() assert hlp.identity(subj) is subj - - -def test_has_origin_not_typing(): - "Test that has_origin disregards a type value if it's not from `typing`." - - assert not hlp.has_origin(list, list) - - -def test_has_origin_handle_tuple(): - "Test that has_origin accepts a tuple of origins." - - assert hlp.has_origin(t.List[int], (str, list, tuple)) - - -def test_has_origin_num_args(): - "Test that has_origin checks the number of arguments." - - assert hlp.has_origin(t.Tuple[int, str, float], tuple, num_args=3) - - -def test_issub_safe_normal_type1(): - "Test that issub_safe behaves like issubclass for normal types." - - assert hlp.issub_safe(bool, int) - assert hlp.issub_safe(bool, (int, float, str)) - assert not hlp.issub_safe(int, str) - - -def test_issub_safe_normal_type2(): - "Test that issub_safe returns False for generic types." 
-
-    assert not hlp.issub_safe(t.List[int], list)
-
-
-def test_eval_type_imports():
-    "Test that the private ``typing._eval_type`` function imports."
-
-    from json_syntax.helpers import _eval_type
-
-    assert (
-        _eval_type is not None
-    ), "typing._eval_type is not available, investigate an alternative."
-
-
-class SomeClass:
-    some_type = t.List["AnotherClass"]
-
-
-class AnotherClass:
-    pass
-
-
-def test_resolve_fwd_ref():
-    "Test that resolve_fwd_ref correctly identifies the target of a forward reference."
-
-    actual = hlp.resolve_fwd_ref(SomeClass.some_type, SomeClass)
-
-    assert hlp.has_origin(actual, list)
-    assert actual.__args__ == (AnotherClass,)
-
-
-def test_resolve_fwd_ref_bad_context():
-    "Test that resolve_fwd_ref returns the original if the module can't be determined."
-
-    try:
-        Forward = t.ForwardRef
-    except AttributeError:
-        Forward = t._ForwardRef
-    subj = Forward("AnotherClass")
-    actual = hlp.resolve_fwd_ref(subj, "dummy")
-
-    assert actual is subj
-
-
-def outside(*msg):
-    with hlp.ErrorContext(".", "alpha"):
-        return inside(*msg)
-
-
-def inside(*msg):
-    with hlp.ErrorContext(".", "beta"):
-        raise ValueError(*msg)
-
-
-def run_func(*args):
-    try:
-        outside(*args)
-    except ValueError as exc:
-        return "".join(tb.format_exception_only(type(exc), exc))
-
-
-def test_error_contexts():
-    "Test that error contexts add information correctly."
-    assert run_func() == "ValueError: At .alpha.beta\n"
-    assert run_func("message") == "ValueError: message; at .alpha.beta\n"
-    assert run_func("two", "parts") == "ValueError: ('two; at .alpha.beta', 'parts')\n"
diff --git a/tests/test_std.py b/tests/test_std.py
index 7473c20..d347ff3 100644
--- a/tests/test_std.py
+++ b/tests/test_std.py
@@ -129,7 +129,7 @@ def test_floats_disregard():
 
 
 def test_floats():
-    "Test the floats rule will generate encoders and decoders for floats that are tolerant of integers."
+    "Test the floats rule generates encoders and decoders that are tolerant of integers."
 
     decoder = std.floats(verb=JSON2PY, typ=float, ctx=Rules())
     assert decoder(77.7) == 77.7
@@ -157,7 +157,7 @@ def test_floats():
 
 
 def test_floats_nan_str():
-    "Test the floats rule will generate encoders and decoders for floats that are tolerant of integers."
+    "Test the floats_nan_str rule generates encoders and decoders that stringify 'nan'."
 
     decoder = std.floats_nan_str(verb=JSON2PY, typ=float, ctx=Rules())
     assert decoder(77.7) == 77.7
@@ -276,7 +276,7 @@ def test_iso_dates_disregard():
 
 
 def test_iso_dates():
-    "Test the iso_dates rule handles dates using ISO8601, rejecting datetimes as input to dates."
+    "Test the iso_dates rule handles dates using ISO8601 and rejects datetimes."
 
     decoder = std.iso_dates(verb=JSON2PY, typ=date, ctx=Rules())
     assert decoder("1776-07-04") == date(1776, 7, 4)
@@ -302,7 +302,7 @@ def test_iso_dates():
 
 
 def test_iso_datetimes():
-    "Test the iso_dates rule will generate encoders and decoders for datetimes using ISO8601."
+    "Test the iso_dates rule generates encoders and decoders for datetimes using ISO8601."
 
     decoder = std.iso_dates(verb=JSON2PY, typ=datetime, ctx=Rules())
     assert decoder("6666-06-06T12:12:12.987654") == datetime(
@@ -385,7 +385,8 @@ def test_enums():
 
 
 def test_enums_int():
-    "Test the enums rule will generate encoders and decoders for enumerated type subclasses."
+    "Test the enums rule generates encoders and decoders for int-valued enums."
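+    # Members decode from their names ("ALPHA"), not from their integer values.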
+
     decoder = std.enums(verb=JSON2PY, typ=Enum2, ctx=Rules())
     assert decoder("ALPHA") == Enum2.ALPHA
     assert decoder("GAMMA") == Enum2.GAMMA
@@ -695,7 +696,7 @@ def test_dicts_string_key():
 
 
 def test_dicts_date_key():
-    "Test that dicts will generate encoders and decoders for dicts with simple dates as keys."
+    "Test that dicts will generate encoders and decoders for dicts keyed by simple dates."
 
     ctx = Rules(std.atoms, std.iso_dates, stringify_keys)
diff --git a/tests/test_types.py b/tests/test_types.py
new file mode 100644
index 0000000..a7732be
--- /dev/null
+++ b/tests/test_types.py
@@ -0,0 +1,119 @@
+import pytest
+
+from json_syntax import types as tt
+
+from .common import typing as t, SoftMod
+from .types_attrs_common import T, U
+
+import attr
+
+ann = SoftMod("tests.types_attrs_ann", allow_SyntaxError=True)
+
+
+@attr.s
+class GenExample(t.Generic[T, U]):
+    body = attr.ib(type=T)
+    count = attr.ib(type=int)
+    messages = attr.ib(type=t.List[U])
+
+
+def test_has_origin_not_typing():
+    "Test that has_origin treats a type value that's not from `typing` as its own origin."
+
+    assert tt.has_origin(list, list)
+
+
+def test_has_origin_handle_tuple():
+    "Test that has_origin accepts a tuple of origins."
+
+    assert tt.has_origin(t.List[int], (str, list, tuple))
+
+
+def test_has_origin_num_args():
+    "Test that has_origin checks the number of arguments."
+
+    assert tt.has_origin(t.Tuple[int, str, float], tuple, num_args=3)
+
+
+def test_issub_safe_normal_type1():
+    "Test that issub_safe behaves like issubclass for normal types."
+
+    assert tt.issub_safe(bool, int)
+    assert tt.issub_safe(bool, (int, float, str))
+    assert not tt.issub_safe(int, str)
+
+
+def test_issub_safe_normal_type2():
+    "Test that issub_safe returns False for generic types."
+
+    assert not tt.issub_safe(t.List[int], list)
+
+
+def test_eval_type_imports():
+    "Test that the private ``typing._eval_type`` function imports."
+
+    assert (
+        tt._eval_type is not None
+    ), "typing._eval_type is not available, investigate an alternative."
+
+
+class SomeClass:
+    some_type = t.List["AnotherClass"]
+
+
+class AnotherClass:
+    pass
+
+
+def test_resolve_fwd_ref():
+    "Test that resolve_fwd_ref correctly identifies the target of a forward reference."
+
+    actual = tt.resolve_fwd_ref(SomeClass.some_type, SomeClass)
+
+    assert tt.has_origin(actual, list)
+    assert actual.__args__ == (AnotherClass,)
+
+
+def test_resolve_fwd_ref_bad_context():
+    "Test that resolve_fwd_ref returns the original if the module can't be determined."
+
+    Forward = t.ForwardRef or t._ForwardRef
+    subj = Forward("AnotherClass")
+    actual = tt.resolve_fwd_ref(subj, "dummy")
+
+    assert actual is subj
+
+
+@pytest.mark.parametrize(
+    "GenClass, origin",
+    [
+        (GenExample, None),
+        (GenExample[str, int], GenExample),
+        (t.List[int], t.List),
+        (t.List["int"], t.List),
+        (t.List, None),
+        (t.Union[int, str], None),
+        (int, None),
+    ],
+)
+def test_get_generic_origin(GenClass, origin):
+    "Test that get_generic_origin finds the origin class, or None if it's not generic."
+    assert tt.get_generic_origin(GenClass) == origin
+
+
+@pytest.mark.parametrize(
+    "GenClass, origin",
+    [
+        (GenExample, GenExample),
+        (GenExample[str, int], GenExample),
+        (t.List[int], list),
+        (t.List["int"], list),
+        (t.List, list),
+        (t.Union[int, str], t.Union),
+        (t.Union, t.Union),
+        (int, int),
+    ],
+)
+def test_get_origin(GenClass, origin):
+    "Test that get_origin finds the origin class, or the type itself if it's not generic."
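+    # Unlike get_generic_origin, get_origin also unwraps bare typing aliases
+    # (e.g. t.List -> list) and returns a non-generic type unchanged.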
+ assert tt.get_origin(GenClass) == origin diff --git a/tests/type_strategies.py b/tests/type_strategies.py index 84dcdf8..a04111e 100644 --- a/tests/type_strategies.py +++ b/tests/type_strategies.py @@ -7,7 +7,8 @@ from . import _strategies as _st -# Tests often want to compare for equality, and there's no good way to do this with NaNs breaking it. :-( +# Tests often want to compare for equality, and there's no good way to do this with NaNs +# breaking it. :-( st.register_type_strategy(Decimal, st.decimals(allow_nan=False)) st.register_type_strategy(float, st.floats(allow_nan=False)) diff --git a/tests/types_attrs_ann.py b/tests/types_attrs_ann.py index f270ba9..c2832ac 100644 --- a/tests/types_attrs_ann.py +++ b/tests/types_attrs_ann.py @@ -1,68 +1,81 @@ import attr -from typing import NamedTuple -from tests.types_attrs_noann import ( # noqa - flat_types, - hook_types, - Hooks, - Dict1, - Named1, - Named2, -) +from .common import dataclasses as dc, typing as t +from .types_attrs_common import Hooks, T, U -try: - from dataclasses import dataclass -except ImportError: - dataclass = None -try: - from typing import TypedDict -except ImportError: - TypedDict = None +@attr.s(auto_attribs=True) +class Flat: + a: int + b: str = "default" @attr.s(auto_attribs=True) -class Flat2: +class GenFlat(t.Generic[T]): + a: T + b: str = "default" + + +@attr.s(auto_attribs=True) +class Hook(Hooks): a: int b: str = "default" -flat_types.append(Flat2) -if dataclass: +class Named(t.NamedTuple): + a: int + b: str = "default" - @dataclass - class Flat3: - a: int - b: str = "default" - flat_types.append(Flat3) +class Dict(t.TypedDict): + a: int + b: str @attr.s(auto_attribs=True) -class Hook2(Hooks): - a: int - b: str = "default" +class GenExample(t.Generic[T, U]): + body: T + count: int + messages: t.List[U] -hook_types.append(Hook2) -if dataclass: +try: - @dataclass - class Hook3(Hooks): - a: int - b: str = "default" + @attr.s(auto_attribs=True, slots=True) + class GenExampleSlots(t.Generic[T, U]): + body: T + count: int + messages: t.List[U] - hook_types.append(Hook3) +except TypeError: + # Slots don't work with Generic on older versions of typing. 
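+    # Leave the name bound to None so parametrized tests can detect and skip it.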
+    GenExampleSlots = None
 
 
-class Named3(NamedTuple):
-    a: int
-    b: str = "default"
+if dc.dataclass:
+
+    @dc.dataclass
+    class FlatDc:
+        a: int
+        b: str = "default"
 
-Dict2 = None
-if TypedDict:
+    @dc.dataclass
+    class GenFlatDc(t.Generic[T]):
+        a: T
+        b: str = "default"
 
-    class Dict2(TypedDict):
+    @dc.dataclass
+    class HookDc(Hooks):
         a: int
-        b: str
+        b: str = "default"
+
+    @dc.dataclass
+    class GenExampleDc(t.Generic[T, U]):
+        body: T
+        count: int
+        messages: t.List[U]
+
+
+else:
+    FlatDc = GenFlatDc = HookDc = GenExampleDc = None
diff --git a/tests/types_attrs_common.py b/tests/types_attrs_common.py
new file mode 100644
index 0000000..e6918ae
--- /dev/null
+++ b/tests/types_attrs_common.py
@@ -0,0 +1,21 @@
+from typing import TypeVar
+
+
+class Hooks:
+    @classmethod
+    def __json_pre_decode__(cls, value):
+        if isinstance(value, list):
+            value = {"a": value[0], "b": value[1]}
+        return value
+
+    @classmethod
+    def __json_check__(cls, value):
+        return value.get("_type_") == "Hook"
+
+    @classmethod
+    def __json_post_encode__(cls, value):
+        return dict(value, _type_="Hook")
+
+
+T = TypeVar("T")
+U = TypeVar("U")
diff --git a/tests/types_attrs_noann.py b/tests/types_attrs_noann.py
deleted file mode 100644
index 43e89c0..0000000
--- a/tests/types_attrs_noann.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import attr
-from collections import namedtuple
-
-try:
-    from typing import TypedDict
-except ImportError:
-    TypedDict = None
-
-
-@attr.s
-class Flat1:
-    a = attr.ib(type=int)
-    b = attr.ib("default", type=str)
-
-
-flat_types = [Flat1]
-
-
-class Hooks:
-    @classmethod
-    def __json_pre_decode__(cls, value):
-        if isinstance(value, list):
-            value = {"a": value[0], "b": value[1]}
-        return value
-
-    @classmethod
-    def __json_check__(cls, value):
-        return value.get("_type_") == "Hook"
-
-    def __json_post_encode__(cls, value):
-        return dict(value, _type_="Hook")
-
-
-@attr.s
-class Hook1(Hooks):
-    a = attr.ib(type=int)
-    b = attr.ib("default", type=str)
-
-
-hook_types = [Hook1]
-
-Named1 = namedtuple("Named1", ["a", "b"])
-named_tup_types = [Named1]
-try:
-    Named2 = namedtuple("Named2", ["a", "b"], defaults=["default"])
-    named_tup_types.append(Named2)
-except TypeError:
-    Named2 = None
-Named3 = None
-
-if TypedDict:
-    Dict1 = TypedDict("Dict1", a=int, b=str)
-else:
-    Dict1 = None
-Dict2 = None