diff --git a/.gitignore b/.gitignore
index e1eced7..2b645a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
/dist/
/.python-version
/.coverage
+/.hypothesis
/htmlcov
/pip-wheel-metadata
setup.py
diff --git a/README.md b/README.md
index 24c0723..1fac5d4 100644
--- a/README.md
+++ b/README.md
@@ -26,8 +26,8 @@ structure using libraries like [attrs][].
* The library has no dependencies of its own
* It does not actually read or write JSON
-At the time of writing, the library is in **alpha** and the API may move around or be
-renamed.
+At the time of writing, the library is in **beta** and the API is relatively stable but
+may change.
### Supported types
@@ -219,7 +219,7 @@ During encoding, the reverse sequence takes place:
#### JSON type check hook
-Type checks are only used in `json-syntax` to support `typing.Union`; in a nutshell, the
+Type checks are only used in _json-syntax_ to support `typing.Union`; in a nutshell, the
`unions` rule will inspect some JSON to see which variant is present.
If a type-check hook is not defined, `__json_pre_decode__` will be called before the
@@ -249,9 +249,40 @@ encode_account = rules.lookup(typ=Union[AccountA, AccountB, AccountC],
See [the examples][] for details on custom rules.
+### Debugging ambiguous structures
+
+(May need more docs and some test cases.)
+
+As _json-syntax_ tries to directly translate your Python types to JSON, it is possible
+to write ambiguous structures. To avoid this, there is a handy `is_ambiguous` method:
+
+```python
+# This is true because both are represented as an array of numbers in JSON.
+rules.is_ambiguous(typ=Union[List[int], Set[int]])
+
+@dataclass
+class Account:
+ user: str
+ address: str
+
+# This is true because such a dictionary would always match the contents of the account.
+rules.is_ambiguous(typ=Union[Dict[str, str], Account])
+```
+
+The aim of this is to let you put a check in your unit tests to make sure data can be
+reliably expressed given your particular case.
+
+Internally, this is using the `PATTERN` verb to represent the JSON pattern, so this may
+be helpful in understanding how _json-syntax_ is trying to represent your data:
+
+```python
+print(rules.lookup(typ=MyAmbiguousClass, verb='show_pattern'))
+```
+
### Sharp edges
-_Alpha release status._ This API may change, there are probably bugs!
+_Beta release status._ This API may change, there are probably bugs! In particular, the
+status of rules accepting subclasses is likely to change.
_The RuleSet caches encoders._ Construct a new ruleset if you want to change settings.
@@ -265,28 +296,8 @@ _Everything to do with typing._ It's a bit magical and sort of wasn't designed f
[We have a guide to it to try and help][types].
_Union types._ You can use `typing.Union` to allow a member to be one of some number of
-alternates, but there are some caveats. These are documented in code in `test_unions`,
-but in plain English:
-
-When encoding Python to JSON:
-
- * `Union[Super, Sub]` will never match Sub when converting from Python to JSON.
-
-When decoding JSON to Python:
-
- * `Union[str, Stringly]` will never construct an instance that is represented as a
- string in JSON.
- * This includes enums, dates and special float values (`Nan`, `-inf`, etc.) may be
- represented as strings.
- * `Union[datetime, date]` will never construct a date because `YYYY-MM-DD` is a valid
- datetime according to ISO8601.
- * `Union[Dict[str, Value], MyAttrs]` will never construct `MyAttrs` if all its
- attributes are `Value`.
- * `Union[List[X], Set[X], FrozenSet[X], Tuple[X, ...]]` will only ever construct
- `List[X]` because all the others are also represented as JSON lists.
- * `Union[MyClassA, MyClassB, MyClassC]` can be ambiguous if these classes all share
- common fields. Consider using the `__json_check__` hook to differentiate. Simply
- adding a field named `class` or something can be unambiguous and fast.
+alternates, but there are some caveats. You should use the `.is_ambiguous()` method of
+RuleSet to warn you of these.
_Rules accept subclasses._ If you subclass `int`, the atoms rule will match it, and then
the converter will call `int` against your instance. I haven't taken the time to examine
@@ -306,7 +317,7 @@ This package is maintained via the [poetry][] tool. Some useful commands:
1. Setup: `poetry install`
2. Run tests: `poetry run pytest tests/`
- 3. Reformat: `poetry run black -N json_syntax/ tests/`
+ 3. Reformat: `poetry run black json_syntax/ tests/`
### Setting up tox
@@ -322,6 +333,10 @@ You'll want pyenv, then install the pythons:
Once you install `tox` in your preferred python, running it is just `tox`.
+(Caveat: `poetry install` is now breaking in `tox` because `pip` has changed: it now
+tries to create a dist in _pip-wheel-metadata_ each time. I'm nuking that directory, but
+most likely there's some new config variable to hunt down.)
+
### Notes
1: Writing the encoder is deceptively easy because the instances in
diff --git a/json_syntax/__init__.py b/json_syntax/__init__.py
index 6347835..a33227d 100644
--- a/json_syntax/__init__.py
+++ b/json_syntax/__init__.py
@@ -22,7 +22,7 @@
)
from .attrs import attrs_classes, named_tuples, tuples
from .unions import unions
-from .helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON # noqa
+from .helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN # noqa
def std_ruleset(
@@ -43,18 +43,18 @@ def std_ruleset(
For example, to replace ``decimals`` with ``decimals_as_str`` just call ``std_ruleset(decimals=decimals_as_str)``
"""
return custom(
+ enums,
atoms,
floats,
decimals,
dates,
optional,
- enums,
lists,
attrs_classes,
sets,
- dicts,
named_tuples,
tuples,
+ dicts,
unions,
*extras,
cache=cache,
diff --git a/json_syntax/action_v1.py b/json_syntax/action_v1.py
index 2cb7d5c..d80a23b 100644
--- a/json_syntax/action_v1.py
+++ b/json_syntax/action_v1.py
@@ -1,7 +1,9 @@
from .helpers import ErrorContext, err_ctx
-from datetime import date, datetime, time
+from datetime import date, datetime, time, timedelta
+from decimal import InvalidOperation
import math
+import re
def check_parse_error(value, parser, error):
@@ -21,6 +23,13 @@ def check_has_type(value, typ):
return type(value) == typ
+def convert_decimal_str(value):
+ result = str(value)
+ if result == "sNaN":
+ raise InvalidOperation("Won't save signalling NaN")
+ return result
+
+
def convert_float(value):
value = float(value)
if math.isfinite(value):
@@ -74,6 +83,56 @@ def convert_str_enum(value, mapping):
del instance
+def convert_timedelta_str(dur):
+ "Barebones support for storing a timedelta as an ISO8601 duration."
+ micro = ".{:06d}".format(dur.microseconds) if dur.microseconds else ""
+ return "P{:d}DT{:d}{}S".format(dur.days, dur.seconds, micro)
+
+
+_iso8601_duration = re.compile(
+ r"^P(?!$)([-+]?\d+(?:[.,]\d+)?Y)?"
+ r"([-+]?\d+(?:[.,]\d+)?M)?"
+ r"([-+]?\d+(?:[.,]\d+)?W)?"
+ r"([-+]?\d+(?:[.,]\d+)?D)?"
+ r"(?:(T)(?=[0-9+-])"
+ r"([-+]?\d+(?:[.,]\d+)?H)?"
+ r"([-+]?\d+(?:[.,]\d+)?M)?"
+ r"([-+]?\d+(?:[.,]\d+)?S)?)?$"
+)
+_duration_args = {
+ "PW": "weeks",
+ "PD": "days",
+ "TH": "hours",
+ "TM": "minutes",
+ "TS": "seconds",
+}
+
+
+def convert_str_timedelta(dur):
+ if not isinstance(dur, str):
+ raise ValueError("Value was not a string.")
+ match = _iso8601_duration.match(dur.upper().replace(",", "."))
+ section = "P"
+ if not match:
+ raise ValueError("Value was not an ISO8601 duration.")
+ args = {}
+ for elem in match.groups():
+ if elem is None:
+ continue
+ if elem == "T":
+ section = "T"
+ continue
+ part = section + elem[-1]
+ value = float(elem[:-1])
+ if not value:
+ continue
+
+ if part in ("PY", "PM"):
+ raise ValueError("Year and month durations not supported")
+ args[_duration_args[part]] = value
+ return timedelta(**args)
+
+
def convert_optional(value, inner):
if value is None:
return None
diff --git a/json_syntax/attrs.py b/json_syntax/attrs.py
index 8249db3..555086a 100644
--- a/json_syntax/attrs.py
+++ b/json_syntax/attrs.py
@@ -3,6 +3,7 @@
PY2JSON,
INSP_JSON,
INSP_PY,
+ PATTERN,
SENTINEL,
has_origin,
identity,
@@ -18,6 +19,7 @@
convert_dict_to_attrs,
convert_tuple_as_list,
)
+from . import pattern as pat
from functools import partial
@@ -33,7 +35,7 @@ def attrs_classes(
"""
Handle an ``@attr.s`` or ``@dataclass`` decorated class.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
return
try:
fields = typ.__attrs_attrs__
@@ -59,7 +61,7 @@ def attrs_classes(
)
if verb == PY2JSON:
tup += (field.default,)
- elif verb == INSP_JSON:
+ elif verb in (INSP_JSON, PATTERN):
tup += (is_attrs_field_required(field),)
inner_map.append(tup)
@@ -82,6 +84,12 @@ def attrs_classes(
return check
pre_hook_method = getattr(typ, pre_hook, identity)
return partial(check_dict, inner_map=inner_map, pre_hook=pre_hook_method)
+ elif verb == PATTERN:
+ return pat.Object.exact(
+ (pat.String.exact(name), inner or pat.Unknown)
+ for name, inner, req in inner_map
+ if req
+ )
def named_tuples(verb, typ, ctx):
@@ -90,7 +98,9 @@ def named_tuples(verb, typ, ctx):
Also handles a ``collections.namedtuple`` if you have a fallback handler.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON) or not issub_safe(typ, tuple):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN) or not issub_safe(
+ typ, tuple
+ ):
return
try:
fields = typ._field_types
@@ -116,7 +126,7 @@ def named_tuples(verb, typ, ctx):
)
if verb == PY2JSON:
tup += (defaults.get(name, SENTINEL),)
- elif verb == INSP_JSON:
+ elif verb in (INSP_JSON, PATTERN):
tup += (name not in defaults,)
inner_map.append(tup)
@@ -133,6 +143,10 @@ def named_tuples(verb, typ, ctx):
)
elif verb == INSP_JSON:
return partial(check_dict, pre_hook=identity, inner_map=tuple(inner_map))
+ elif verb == PATTERN:
+ return pat.Object.exact(
+ (pat.String.exact(name), inner) for name, inner, req in inner_map if req
+ )
def tuples(verb, typ, ctx):
@@ -140,7 +154,9 @@ def tuples(verb, typ, ctx):
Handle a ``Tuple[type, type, type]`` product type. Use a ``NamedTuple`` if you don't
want a list.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON) or not has_origin(typ, tuple):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN) or not has_origin(
+ typ, tuple
+ ):
return
args = typ.__args__
if Ellipsis in args:
@@ -155,3 +171,5 @@ def tuples(verb, typ, ctx):
return partial(check_tuple_as_list, inner=inner, con=tuple)
elif verb == INSP_JSON:
return partial(check_tuple_as_list, inner=inner, con=list)
+ elif verb == PATTERN:
+ return pat.Array.exact(inner)
diff --git a/json_syntax/helpers.py b/json_syntax/helpers.py
index 48b29ce..e1f1a05 100644
--- a/json_syntax/helpers.py
+++ b/json_syntax/helpers.py
@@ -9,6 +9,7 @@
PY2JSON = "python_to_json"
INSP_JSON = "inspect_json"
INSP_PY = "inspect_python"
+PATTERN = "show_pattern"
NoneType = type(None)
SENTINEL = object()
python_minor = sys.version_info[:2]
diff --git a/json_syntax/pattern.py b/json_syntax/pattern.py
new file mode 100644
index 0000000..81027a2
--- /dev/null
+++ b/json_syntax/pattern.py
@@ -0,0 +1,303 @@
+"""
+Patterns to represent roughly what syntax will look like, and also to investigate whether
+unions are potentially ambiguous.
+"""
+from functools import partial, lru_cache, singledispatch
+from itertools import chain, cycle, islice, product, zip_longest
+from enum import IntEnum
+
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+ def _def(obj):
+ return obj.for_json()
+
+ _args = {"default": lambda obj: obj.for_json()}
+else:
+ _args = {"for_json": True}
+
+dump = partial(json.dump, **_args)
+dumps = partial(json.dumps, **_args)
+
+
+class Matches(IntEnum):
+ """
+ This determines the degree to which one pattern can shadow another causing potential ambiguity.
+ """
+
+ always = 0 # Will always match
+ sometimes = 1 # It will sometimes match
+ potential = 2 # Can't prove it won't match
+ never = 3 # Provably won't match
+
+
+matches_all = partial(max, default=Matches.always)
+matches_any = partial(min, default=Matches.never)
+
+
+def matches(left, right, ctx=None):
+ if ctx is None:
+ ctx = set()
+ else:
+ if (left, right) in ctx:
+ return Matches.potential
+ ctx.add((left, right))
+ result = matches_any(
+ left._matches(right, ctx)
+ for left, right in product(left._unpack(), right._unpack())
+ )
+ return result
+
+
+class Pattern:
+ def _matches(self, other, ctx):
+ raise NotImplementedError()
+
+ def _unpack(self):
+ return [self]
+
+ def __repr__(self):
+ return dumps(self, indent=2)
+
+
+class Atom(Pattern):
+ def __init__(self, value):
+ self.value = value
+
+ def for_json(self):
+ return self.value
+
+ def _matches(self, other, ctx):
+ return (
+ Matches.always
+ if isinstance(other, Atom) and self.value == other.value
+ else Matches.never
+ )
+
+
+class String(Pattern):
+ """
+ Rather than try to analyze regular expressions, we just name common string patterns,
+ and have a list of known ambiguities.
+
+ We're deliberately not trying to analyze regexes here as we assume you would want to
+ use specialize logic to make such fine distinctions.
+ """
+
+ def __init__(self, name, arg=None):
+ self.name = name
+ self.arg = arg
+
+ def for_json(self):
+ if self.name == "exact":
+ return "=" + self.arg
+ else:
+ return self.name
+
+ @classmethod
+ def exact(cls, string):
+ assert isinstance(string, str)
+ return cls("exact", string)
+
+ def _matches(self, other, ctx):
+ "Check whether this pattern will match the other."
+ if not isinstance(other, String):
+ return Matches.never
+ if self.name == "str":
+ return Matches.always # Strings always overshadow
+ elif other.name == "str":
+ return Matches.sometimes # Strings are sometimes shadowed
+ if self.name == "exact":
+ if other.name == "exact":
+ return Matches.always if self.arg == other.arg else Matches.never
+ elif other.arg is None:
+ return Matches.potential
+ else:
+ return Matches.always if other.arg(self.arg) else Matches.never
+ return Matches.always if self.name == other.name else Matches.potential
+
+
+class _Unknown(Pattern):
+ def __init__(self, name, match):
+ self._name = name
+ self._match = match
+
+ def _matches(self, other, ctx):
+ return self._match
+
+ def __repr__(self):
+ return self._name
+
+
+String.any = String("str")
+Number = Atom(0)
+Null = Atom(None)
+Bool = Atom(False)
+Missing = _Unknown("", Matches.never)
+Unknown = _Unknown("", Matches.potential)
+
+
+class Alternatives(Pattern):
+ """
+ Used by the `show_pattern` verb to represent alternative patterns in unions.
+ """
+
+ def __init__(self, alts):
+ self.alts = tuple(alts)
+ assert all(isinstance(alt, Pattern) for alt in self.alts)
+
+ def _unpack(self):
+ yield from self.alts
+
+ def _matches(self, other, ctx):
+ raise NotImplementedError(
+ "Didn't call unpack"
+ ) # Should be bypassed by _unpack.
+
+ def for_json(self):
+ out = ["alts"]
+ out.extend(self.alts)
+ return out
+
+
+class Array(Pattern):
+ def __init__(self, elems, *, homog):
+ self.elems = tuple(elems)
+ assert all(isinstance(elem, Pattern) for elem in self.elems)
+ self._homog = homog
+
+ @classmethod
+ def homog(cls, elem):
+ return cls((elem,), homog=True)
+
+ @classmethod
+ def exact(cls, elems):
+ return cls(elems, homog=False)
+
+ def _matches(self, other, ctx):
+ if not isinstance(other, Array):
+ return Matches.never
+ if self._homog and not other.elems:
+ return Matches.always
+ left = self.elems
+ right = other.elems
+ if self._homog and not other._homog:
+ left = islice(cycle(left), len(right))
+ elif not self._homog and other._homog:
+ right = islice(cycle(right), len(left))
+
+ possible = matches_all(
+ matches(l, r, ctx) for l, r in zip_longest(left, right, fillvalue=Missing)
+ )
+ if self._homog and other._homog:
+ # Two homogeneous arrays can both be empty, so at best a sometimes match.
+ possible = matches_any([Matches.sometimes, possible])
+ return possible
+
+ def for_json(self):
+ out = list(self.elems)
+ if self._homog:
+ out.append("...")
+ return out
+
+
+class Object(Pattern):
+ def __init__(self, items, *, homog):
+ self.items = tuple(items)
+ valid = all(
+ isinstance(key, Pattern) and isinstance(val, Pattern)
+ for key, val in self.items
+ )
+ if not valid:
+ # for key, val in self.items:
+ # print(f"{key!r}: {type(key)} / {val!r}: {type(val)}")
+ raise TypeError("Keys and values must be patterns")
+ self._homog = homog
+
+ @classmethod
+ def homog(cls, key, val):
+ return cls(((key, val),), homog=True)
+
+ @classmethod
+ def exact(cls, items):
+ return cls(items, homog=False)
+
+ def _matches(self, other, ctx):
+ if not isinstance(other, Object):
+ return Matches.never
+ if self._homog and not other.items:
+ return Matches.always
+
+ possible = matches_all(
+ matches_any(
+ matches_all((matches(lk, rk, ctx), matches(lv, rv, ctx))) for rk, rv in other.items
+ )
+ for lk, lv in self.items
+ )
+ if self._homog and other._homog:
+ possible = matches_any([Matches.sometimes, possible])
+ return possible
+
+ def for_json(self):
+ def jsonify(key):
+ try:
+ for_json = key.for_json
+ except AttributeError:
+ return key
+ else:
+ return for_json()
+
+ out = {jsonify(k): v for k, v in self.items}
+ if self._homog:
+ out["..."] = "..."
+ return out
+
+
+@singledispatch
+def is_ambiguous(pattern, threshold=Matches.always, _path=()):
+ raise TypeError("pattern must be a recognized subclass of Pattern.")
+
+
+@is_ambiguous.register(Atom)
+@is_ambiguous.register(String)
+def _(pattern, threshold=Matches.always, _path=()):
+ return ()
+
+
+@is_ambiguous.register(_Unknown)
+def _(pattern, threshold=Matches.always, _path=()):
+ return (str(pattern),) if pattern._match <= threshold else ()
+
+
+def _any(iterable):
+ for item in iterable:
+ if bool(item):
+ return item
+ return ()
+
+
+@is_ambiguous.register(Array)
+def _(pattern, threshold=Matches.always, _path=()):
+ _path += ("[]",)
+ return _any(is_ambiguous(elem, threshold, _path) for elem in pattern.elems)
+
+
+@is_ambiguous.register(Object)
+def _(pattern, threshold=Matches.always, _path=()):
+ return _any(
+ is_ambiguous(val, threshold, _path + (str(key),)) for key, val in pattern.items
+ )
+
+
+@is_ambiguous.register(Alternatives)
+def _(pattern, threshold=Matches.always, _path=()):
+ # An ambiguous pattern is one where an earlier pattern shadows a later pattern.
+ alts = pattern.alts
+ for i, early in enumerate(alts[:-1]):
+ for late in alts[i + 1 :]:
+ if matches(early, late) <= threshold:
+ return _path + ("alternative {}".format(i),)
+
+ return ()
diff --git a/json_syntax/ruleset.py b/json_syntax/ruleset.py
index f241c25..695e808 100644
--- a/json_syntax/ruleset.py
+++ b/json_syntax/ruleset.py
@@ -1,4 +1,6 @@
from .cache import SimpleCache
+from .helpers import PATTERN
+from . import pattern
import logging
@@ -52,5 +54,15 @@ def lookup(self, verb, typ, accept_missing=False):
finally:
self.cache.de_flight(verb=verb, typ=typ, forward=forward)
+ if action is None and not accept_missing:
+ raise ValueError("Failed: lookup({!s}, {!r})".format(verb, typ))
+
def fallback(self, verb, typ):
- pass
+ if verb == PATTERN:
+ return pattern.Unknown
+ else:
+ return None
+
+ def is_ambiguous(self, typ, threshold=pattern.Matches.always):
+ pat = self.lookup(verb=PATTERN, typ=typ)
+ return pattern.is_ambiguous(pat, threshold=threshold)
diff --git a/json_syntax/std.py b/json_syntax/std.py
index 4ce5214..dd071ac 100644
--- a/json_syntax/std.py
+++ b/json_syntax/std.py
@@ -7,6 +7,7 @@
PY2JSON,
INSP_JSON,
INSP_PY,
+ PATTERN,
)
from .action_v1 import (
check_collection,
@@ -20,17 +21,21 @@
convert_collection,
convert_date,
convert_datetime,
+ convert_decimal_str,
convert_enum_str,
convert_float,
convert_mapping,
convert_none,
convert_optional,
convert_str_enum,
+ convert_str_timedelta,
convert_time,
+ convert_timedelta_str,
)
+from . import pattern as pat
from collections import OrderedDict
-from datetime import datetime, date, time
+from datetime import datetime, date, time, timedelta
from decimal import Decimal
from enum import Enum
from functools import partial
@@ -61,6 +66,15 @@ def atoms(verb, typ, ctx):
for base in (NoneType, str, bool, int):
if issubclass(typ, base):
return partial(check_isinst, typ=base)
+ elif verb == PATTERN:
+ for base, node in [
+ (NoneType, pat.Null),
+ (str, pat.String.any),
+ (bool, pat.Bool),
+ (int, pat.Number),
+ ]:
+ if issubclass(typ, base):
+ return node
def floats(verb, typ, ctx):
@@ -82,6 +96,8 @@ def floats(verb, typ, ctx):
return partial(check_isinst, typ=float)
elif verb == INSP_JSON:
return partial(check_isinst, typ=(int, float))
+ elif verb == PATTERN:
+ return pat.Number
def floats_nan_str(verb, typ, ctx):
@@ -101,6 +117,8 @@ def floats_nan_str(verb, typ, ctx):
return partial(check_isinst, typ=float)
elif verb == INSP_JSON:
return check_float
+ elif verb == PATTERN:
+ return pat.Number
def decimals(verb, typ, ctx):
@@ -118,6 +136,8 @@ def decimals(verb, typ, ctx):
return Decimal
elif verb in (INSP_JSON, INSP_PY):
return partial(check_isinst, typ=Decimal)
+ elif verb == PATTERN:
+ return pat.Number
def decimals_as_str(verb, typ, ctx):
@@ -132,43 +152,46 @@ def decimals_as_str(verb, typ, ctx):
if verb == JSON2PY:
return Decimal
elif verb == PY2JSON:
- return str
+ return convert_decimal_str
elif verb == INSP_PY:
return partial(check_isinst, typ=Decimal)
- elif verb == INSP_JSON:
- return partial(check_parse_error, parser=Decimal, error=ArithmeticError)
+ elif verb in (INSP_JSON, PATTERN):
+ inspect = partial(check_parse_error, parser=Decimal, error=ArithmeticError)
+ return pat.String("number", inspect) if verb == PATTERN else inspect
def iso_dates(verb, typ, ctx):
"""
Rule to handle iso formatted datetimes and dates.
- This is the strict variant that simply uses the `fromisoformat` and `isoformat` methods of `date` and `datetime`.
+ This simply uses the `fromisoformat` and `isoformat` methods of `date` and `datetime`.
There is a loose variant in the examples that will accept a datetime in a date. A datetime always accepts both
dates and datetimes.
"""
- if typ not in (date, datetime, time):
+ if typ not in (date, datetime, time, timedelta):
return
if verb == PY2JSON:
- return typ.isoformat
+ return convert_timedelta_str if typ == timedelta else typ.isoformat
elif verb == INSP_PY:
return partial(check_has_type, typ=typ)
- elif verb in (JSON2PY, INSP_JSON):
+ elif verb in (JSON2PY, INSP_JSON, PATTERN):
if typ == date:
parse = convert_date
elif typ == datetime:
parse = convert_datetime
elif typ == time:
parse = convert_time
+ elif typ == timedelta:
+ parse = convert_str_timedelta
else:
return
if verb == JSON2PY:
return parse
- else:
- return partial(
- check_parse_error, parser=parse, error=(TypeError, ValueError)
- )
+ inspect = partial(
+ check_parse_error, parser=parse, error=(TypeError, ValueError)
+ )
+ return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
def enums(verb, typ, ctx):
@@ -180,8 +203,9 @@ def enums(verb, typ, ctx):
return partial(convert_str_enum, mapping=dict(typ.__members__))
elif verb == INSP_PY:
return partial(check_isinst, typ=typ)
- elif verb == INSP_JSON:
- return partial(check_str_enum, mapping=frozenset(typ.__members__.keys()))
+ elif verb in (INSP_JSON, PATTERN):
+ inspect = partial(check_str_enum, mapping=frozenset(typ.__members__.keys()))
+ return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
def faux_enums(verb, typ, ctx):
@@ -190,15 +214,16 @@ def faux_enums(verb, typ, ctx):
if verb in (JSON2PY, PY2JSON):
mapping = {name: name for name in typ.__members__}
return partial(convert_str_enum, mapping=mapping)
- elif verb in (INSP_JSON, INSP_PY):
- return partial(check_str_enum, mapping=frozenset(typ.__members__.keys()))
+ elif verb in (INSP_JSON, INSP_PY, PATTERN):
+ inspect = partial(check_str_enum, mapping=frozenset(typ.__members__.keys()))
+ return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
def optional(verb, typ, ctx):
"""
Handle an ``Optional[inner]`` by passing ``None`` through.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
return
if has_origin(typ, Union, num_args=2):
if NoneType not in typ.__args__:
@@ -216,6 +241,8 @@ def optional(verb, typ, ctx):
return partial(convert_optional, inner=inner)
elif verb in (INSP_JSON, INSP_PY):
return partial(check_optional, inner=inner)
+ elif verb == PATTERN:
+ return pat.Alternatives([pat.Null, inner])
def lists(verb, typ, ctx):
@@ -225,7 +252,7 @@ def lists(verb, typ, ctx):
Trivia: the ellipsis indicates a homogenous tuple; ``Tuple[A, B, C]`` is a product
type that contains exactly those elements.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
return
if has_origin(typ, list, num_args=1):
(inner,) = typ.__args__
@@ -236,28 +263,32 @@ def lists(verb, typ, ctx):
else:
return
inner = ctx.lookup(verb=verb, typ=inner)
- con = list if verb in (PY2JSON, INSP_JSON) else get_origin(typ)
+ con = list if verb in (PY2JSON, INSP_JSON, PATTERN) else get_origin(typ)
if verb in (JSON2PY, PY2JSON):
return partial(convert_collection, inner=inner, con=con)
elif verb in (INSP_JSON, INSP_PY):
return partial(check_collection, inner=inner, con=con)
+ elif verb == PATTERN:
+ return pat.Array.homog(inner)
def sets(verb, typ, ctx):
"""
Handle a ``Set[type]`` or ``FrozenSet[type]``.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
+ if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
return
if not has_origin(typ, (set, frozenset), num_args=1):
return
(inner,) = typ.__args__
- con = list if verb in (PY2JSON, INSP_JSON) else get_origin(typ)
+ con = list if verb in (PY2JSON, INSP_JSON, PATTERN) else get_origin(typ)
inner = ctx.lookup(verb=verb, typ=inner)
if verb in (JSON2PY, PY2JSON):
return partial(convert_collection, inner=inner, con=con)
elif verb in (INSP_JSON, INSP_PY):
return partial(check_collection, inner=inner, con=con)
+ elif verb == PATTERN:
+ return pat.Array.homog(inner)
def _stringly(verb, typ, ctx):
@@ -266,16 +297,19 @@ def _stringly(verb, typ, ctx):
This is used internally by dicts.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON) or not issub_safe(
- typ, (int, str, date, Enum)
- ):
- return
for base in str, int:
if issubclass(typ, base):
+ if verb == PATTERN and base == str:
+ return pat.String.any
if verb in (JSON2PY, PY2JSON):
return base
- elif verb in (INSP_JSON, INSP_PY):
+ elif verb == INSP_PY:
return partial(check_isinst, typ=base)
+ elif verb in (INSP_JSON, PATTERN):
+ inspect = partial(check_parse_error, parser=base, error=ValueError)
+ return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
+ if issubclass(typ, (datetime, time)):
+ return
for rule in enums, iso_dates:
action = rule(verb=verb, typ=typ, ctx=ctx)
if action is not None:
@@ -284,10 +318,8 @@ def _stringly(verb, typ, ctx):
def dicts(verb, typ, ctx):
"""
- Handle a ``Dict[key, value]`` where key is a string, integer or enum type.
+ Handle a ``Dict[key, value]`` where key is a string, integer, date or enum type.
"""
- if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
- return
if not has_origin(typ, (dict, OrderedDict), num_args=2):
return
(key_type, val_type) = typ.__args__
@@ -299,3 +331,5 @@ def dicts(verb, typ, ctx):
return partial(convert_mapping, key=key_type, val=val_type, con=get_origin(typ))
elif verb in (INSP_JSON, INSP_PY):
return partial(check_mapping, key=key_type, val=val_type, con=get_origin(typ))
+ elif verb == PATTERN:
+ return pat.Object.homog(key_type, val_type)
diff --git a/json_syntax/unions.py b/json_syntax/unions.py
index 9034bca..2279e3c 100644
--- a/json_syntax/unions.py
+++ b/json_syntax/unions.py
@@ -1,5 +1,6 @@
-from .helpers import has_origin, JSON2PY, PY2JSON, INSP_JSON, INSP_PY
+from .helpers import has_origin, JSON2PY, PY2JSON, INSP_JSON, INSP_PY, PATTERN
from .action_v1 import convert_union, check_union
+from . import pattern as pat
from functools import partial
from typing import Union
@@ -30,15 +31,21 @@ def unions(verb, typ, ctx):
check_verb = INSP_JSON
else:
return
- steps = []
- for arg in typ.__args__:
- check = ctx.lookup(verb=check_verb, typ=arg)
- convert = ctx.lookup(verb=verb, typ=arg)
- steps.append((check, convert, "<{!s}>".format(arg)))
+ steps = [
+ (
+ ctx.lookup(verb=check_verb, typ=arg),
+ ctx.lookup(verb=verb, typ=arg),
+ "<{!s}>".format(arg),
+ )
+ for arg in typ.__args__
+ ]
return partial(convert_union, steps=steps, typename=repr(typ))
elif verb in (INSP_JSON, INSP_PY):
- steps = []
- for arg in typ.__args__:
- check = ctx.lookup(verb=verb, typ=arg)
- steps.append((check, "<{!s}>".format(arg)))
+ steps = [
+ (ctx.lookup(verb=verb, typ=arg), "<{!s}>".format(arg))
+ for arg in typ.__args__
+ ]
return partial(check_union, steps=steps)
+ elif verb == PATTERN:
+ alts = [ctx.lookup(verb=verb, typ=arg) for arg in typ.__args__]
+ return pat.Alternatives(alts)
diff --git a/poetry.lock b/poetry.lock
index 98ad268..a299a97 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -30,11 +30,11 @@ marker = "python_version >= \"3.7\""
name = "black"
optional = false
python-versions = ">=3.6"
-version = "18.9b0"
+version = "19.3b0"
[package.dependencies]
appdirs = "*"
-attrs = ">=17.4.0"
+attrs = ">=18.1.0"
click = ">=6.5"
toml = ">=0.9.4"
@@ -62,7 +62,18 @@ description = "Code coverage measurement for Python"
name = "coverage"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4"
-version = "4.5.2"
+version = "4.5.3"
+
+[[package]]
+category = "dev"
+description = "A library for property based testing"
+name = "hypothesis"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+version = "4.14.2"
+
+[package.dependencies]
+attrs = ">=16.0.0"
[[package]]
category = "dev"
@@ -71,7 +82,7 @@ marker = "python_version > \"2.7\""
name = "more-itertools"
optional = false
python-versions = ">=3.4"
-version = "6.0.0"
+version = "7.0.0"
[[package]]
category = "dev"
@@ -91,7 +102,7 @@ description = "plugin and hook calling mechanisms for python"
name = "pluggy"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "0.8.1"
+version = "0.9.0"
[[package]]
category = "dev"
@@ -99,7 +110,7 @@ description = "library with cross-python path, ini-parsing, io, code, log facili
name = "py"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "1.7.0"
+version = "1.8.0"
[[package]]
category = "dev"
@@ -107,13 +118,13 @@ description = "pytest: simple powerful testing with Python"
name = "pytest"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "4.2.1"
+version = "4.4.0"
[package.dependencies]
atomicwrites = ">=1.0"
attrs = ">=17.4.0"
colorama = "*"
-pluggy = ">=0.7"
+pluggy = ">=0.9"
py = ">=1.5.0"
setuptools = "*"
six = ">=1.10.0"
@@ -168,22 +179,23 @@ python-versions = "*"
version = "0.10.0"
[metadata]
-content-hash = "4c0e31907eb58992b7d6ee19b1f5b30233cffbfd056d7208bc6f5278f2ea1b6b"
+content-hash = "30ade8462629ad243bf4029ae6c7d9fe784af2117aa85c903aadc3b6f6e2d4f9"
python-versions = "^3.5"
[metadata.hashes]
appdirs = ["9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", "d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"]
atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"]
attrs = ["10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", "ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"]
-black = ["817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", "e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5"]
+black = ["09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf", "68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c"]
click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"]
colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"]
-coverage = ["06123b58a1410873e22134ca2d88bd36680479fe354955b3579fb8ff150e4d27", "09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", "0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", "0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", "0d34245f824cc3140150ab7848d08b7e2ba67ada959d77619c986f2062e1f0e8", "10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", "1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", "1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", "258b21c5cafb0c3768861a6df3ab0cfb4d8b495eee5ec660e16f928bf7385390", "2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", "3ad59c84c502cd134b0088ca9038d100e8fb5081bbd5ccca4863f3804d81f61d", "447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", "46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", "4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", "510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", "5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", "5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", "5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", "6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", "6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", "71afc1f5cd72ab97330126b566bbf4e8661aab7449f08895d21a5d08c6b051ff", "7349c27128334f787ae63ab49d90bf6d47c7288c63a0a5dfaa319d4b4541dd2c", "77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", "828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", "859714036274a75e6e57c7bab0c47a4602d2a8cfaaa33bbdb68c8359b2ed4f5c", "85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", "869ef4a19f6e4c6987e18b315721b8b971f7048e6eaea29c066854242b4e98d9", "8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", "977e2d9a646773cc7428cdd9a34b069d6ee254fadfb4d09b3f430e95472f3cf3", 
"99bd767c49c775b79fdcd2eabff405f1063d9d959039c0bdd720527a7738748a", "a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", "aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", "ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", "b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", "bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", "c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", "d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", "d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", "da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", "ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", "ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"]
-more-itertools = ["0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40", "590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"]
+coverage = ["0c5fe441b9cfdab64719f24e9684502a59432df7570521563d7b1aff27ac755f", "2b412abc4c7d6e019ce7c27cbc229783035eef6d5401695dccba80f481be4eb3", "3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9", "39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74", "3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390", "42692db854d13c6c5e9541b6ffe0fe921fe16c9c446358d642ccae1462582d3b", "465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8", "48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe", "4ec30ade438d1711562f3786bea33a9da6107414aed60a5daa974d50a8c2c351", "5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf", "5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e", "6899797ac384b239ce1926f3cb86ffc19996f6fa3a1efbb23cb49e0c12d8c18c", "68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741", "6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09", "7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd", "7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034", "839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420", "8e679d1bde5e2de4a909efb071f14b472a678b788904440779d2c449c0355b27", "8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c", "932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab", "93f965415cc51604f571e491f280cff0f5be35895b4eb5e55b47ae90c02a497b", "988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba", "998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e", "9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609", "9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2", "a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49", "a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b", "a9abc8c480e103dc05d9b332c6cc9fb1586330356fc14f1aa9c0ca5745097d19", "aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d", 
"bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce", "bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9", "c22ab9f96cbaff05c6a84e20ec856383d27eae09e511d3e6ac4479489195861d", "c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4", "c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773", "c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723", "ca58eba39c68010d7e87a823f22a081b5290e3e3c64714aac3c91481d8b34d22", "df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c", "f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f", "f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1", "f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260", "fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a"]
+hypothesis = ["232f5e19de11e5da6ec78091a4c6cd8fe7b9454a15ebf9df38284ebc2435db78", "c3fd7aa8d22f4179089a42fd5f759b439de0a9dd02a228935d3c85e22780cdf2", "d23b44e711fcef554eda08328b88c7bd4143d4d0028c74118160643248916094"]
+more-itertools = ["2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7", "c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"]
pathlib2 = ["25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", "5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"]
-pluggy = ["8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", "980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"]
-py = ["bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"]
-pytest = ["80cfd9c8b9e93f419abcc0400e9f595974a98e44b6863a77d3e1039961bfc9c4", "c2396a15726218a2dfef480861c4ba37bd3952ebaaa5b0fede3fc23fddcd7f8c"]
+pluggy = ["19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", "84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"]
+py = ["64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"]
+pytest = ["13c5e9fb5ec5179995e9357111ab089af350d788cbc944c628f3cde72285809b", "f21d2f1fb8200830dcbb5d8ec466a9c9120e20d8b53c7585d180125cce1d297a"]
pytest-cov = ["0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f"]
python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"]
six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"]
diff --git a/pyproject.toml b/pyproject.toml
index e5c6f55..27ff470 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,13 @@
[tool.poetry]
name = "json-syntax"
-version = "0.2.0"
+version = "0.2.1"
description = "Generates functions to convert Python classes to JSON dumpable objects."
authors = ["Ben Samuel "]
license = "MIT"
readme = "README.md"
repository = "https://github.com/UnitedIncome/json-syntax"
classifiers = [
- "Development Status :: 3 - Alpha",
+ "Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries"
]
@@ -20,11 +20,12 @@ python-dateutil = {version="^2.7", python = "<3.7"}
pytest = "^4.1"
attrs = "^18.2"
pytest-cov = "^2.6"
-black = {version = "^18.3-alpha.0",allows-prereleases = true,python = ">=3.7"}
+black = {version = "^19.3-beta.0",allows-prereleases = true,python = ">=3.7"}
+hypothesis = "^4.14"
[tool.black]
line-length = 88
-py36 = true
+target-version = ["py35"]
[tool.tox]
legacy_tox_ini = """
@@ -36,8 +37,10 @@ skipsdist = true
[testenv]
deps =
poetry
+whitelist_externals =
+ /bin/rm
commands =
- python -V
+ /bin/rm -r pip-wheel-metadata
poetry install
poetry run pytest {posargs}
"""
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/_strategies.py b/tests/_strategies.py
new file mode 100644
index 0000000..743dc4e
--- /dev/null
+++ b/tests/_strategies.py
@@ -0,0 +1,151 @@
+"""
+Some miscellany to keep the type_strategies module a bit more readable.
+"""
+from hypothesis import strategies as st
+
+import attr
+
+try:
+ import dataclasses as dc
+except ImportError:
+ dc = None
+from datetime import date
+from enum import IntEnum
+from keyword import iskeyword
+import os
+import typing
+
+
+MAX_FIELDS = 8
+_max_cp = None if os.environ.get("UNICODE_NAMES") else 0x7F
+_any_char = st.characters(min_codepoint=1, max_codepoint=_max_cp)
+_ident_start = st.characters(
+ whitelist_categories=["Lu", "Ll", "Lt", "Lm", "Lo", "Nl"], max_codepoint=_max_cp
+)
+_ident_tail = st.characters(
+ whitelist_categories=["Lu", "Ll", "Lt", "Lm", "Lo", "Nl", "Mn", "Mc", "Nd", "Pc"],
+ whitelist_characters="_",
+ max_codepoint=_max_cp,
+)
+
+
+@st.composite
+def _idents(draw, lengths=st.integers(min_value=0, max_value=80)):
+ chars = [draw(_ident_start)]
+ chars.extend(draw(_ident_tail) for _ in range(draw(lengths)))
+ chars = "".join(chars)
+ if iskeyword(chars):
+ chars += draw(_ident_tail)
+ return chars
+
+
+def _make_enum(name, elems):
+ # print(f'IntEnum(enum_{name}, {elems!r})')
+ return IntEnum("enum_" + name, elems)
+
+
+idents = _idents()
+enums = st.builds(
+ _make_enum, idents, st.lists(idents, min_size=1, max_size=MAX_FIELDS, unique=True)
+)
+
+
+def fields_idents(types):
+ return st.dictionaries(
+ idents, types, dict_class=list, min_size=0, max_size=MAX_FIELDS
+ )
+
+
+class _Faux(attr.validators._InstanceOfValidator):
+ def __call__(self, inst, attr, value):
+ pass
+
+
+def attrs(types, frozen):
+ def _make(name, fields, **kw):
+ def _attrib(typ):
+ # Add a bogus validator because from_type reads that, not `type`
+ # Can't use the real one because of generic types!
+ return attr.ib(type=typ, validator=_Faux(typ))
+
+ # print(f'attrs({name}, {fields}, **{kw})')
+ return attr.make_class(
+ "attrs_" + name,
+ {field: _attrib(typ) for field, typ in fields},
+ frozen=frozen,
+ **kw,
+ )
+
+ return st.builds(_make, idents, fields_idents(types), slots=st.booleans())
+
+
+if dc is not None:
+
+ def dataclasses(types, frozen):
+ def _make(name, fields, order):
+ # print(f'dataclass({name}, {fields}, frozen={frozen}, order={order}')
+ return dc.make_dataclass(
+ "dc_" + name, fields, frozen=frozen, eq=True, order=order
+ )
+
+ return st.builds(_make, idents, fields_idents(types), order=st.booleans())
+
+
+else:
+
+ def dataclasses(types, frozen):
+ return None
+
+
+try:
+ _NamedTuple = typing.NamedTuple
+except AttributeError:
+
+ def namedtuples(types):
+ return None
+
+
+else:
+
+ def namedtuples(types):
+ def _make(name, fields):
+ # print(f'namedtuple({name}, {fields})')
+ return _NamedTuple("nt_" + name, fields)
+
+ return st.builds(_make, idents, fields_idents(types))
+
+
+def lists(types):
+ return st.builds(lambda a: typing.List[a], types)
+
+
+def hmg_tuples(types):
+ return st.builds(lambda a: typing.Tuple[a, ...], types)
+
+
+def sets(types):
+ return st.builds(lambda a: typing.Set[a], types)
+
+
+def frozensets(types):
+ return st.builds(lambda a: typing.FrozenSet[a], types)
+
+
+_dict_keys = atoms = st.one_of([st.sampled_from([int, str, date]), enums])
+
+
+def dicts(val_types):
+ return st.builds(lambda k, v: typing.Dict[k, v], _dict_keys, val_types)
+
+
+def prod_tuples(types):
+ return st.builds(
+ lambda a: typing.Tuple[tuple(a)],
+ st.lists(types, min_size=1, max_size=MAX_FIELDS),
+ )
+
+
+def unions(types, max_size=None):
+ return st.builds(
+ lambda a: typing.Union[tuple(a)], st.lists(types, min_size=1, max_size=max_size)
+ )
diff --git a/tests/test_union.py b/tests/test_union.py
index 5f1ae9e..ff72b09 100644
--- a/tests/test_union.py
+++ b/tests/test_union.py
@@ -22,27 +22,41 @@ class Dir(Enum):
DOWN = 2
-cases = [
+atoms = [
(NoneType, None, None),
(bool, True, True),
+]
+
+nums = [
(int, 5, 5),
(float, 3.3, 3.3),
(Decimal, Decimal("5.5"), Decimal("5.5")),
+]
+
+strings = [
(str, "str", "str"),
(date, date(2010, 10, 10), "2010-10-10"),
(datetime, datetime(2011, 11, 11, 11, 11, 11), "2011-11-11T11:11:11"),
- (Point, Point(x=4.5, y=6.6), {"x": 4.5, "y": 6.6}),
- (Dir, Dir.UP, "UP"),
+ (Dir, Dir.UP, "UP")
+]
+
+arrays = [
(List[Point], [Point(x=4.5, y=6.6)], [{"x": 4.5, "y": 6.6}]),
(Tuple[Point, ...], (Point(x=4.5, y=6.6),), [{"x": 4.5, "y": 6.6}]),
(Set[Point], {Point(x=4.5, y=6.6)}, [{"x": 4.5, "y": 6.6}]),
(FrozenSet[Point], frozenset([Point(x=4.5, y=6.6)]), [{"x": 4.5, "y": 6.6}]),
+]
+
+dicts = [
+ (Point, Point(x=4.5, y=6.6), {"x": 4.5, "y": 6.6}),
(Dict[Dir, Decimal], {Dir.UP: Decimal("7.7")}, {"UP": Decimal("7.7")}),
(Dict[str, float], {"a": 2.3, "b": 3.4}, {"a": 2.3, "b": 3.4}),
]
+cats = [atoms, nums, strings, arrays, dicts]
-@pytest.mark.parametrize("typ,py,js", cases)
+
+@pytest.mark.parametrize("typ,py,js", [trip for cat in cats for trip in cat])
def test_simple(typ, py, js):
rs = std_ruleset()
act = rs.lookup(verb=PY2JSON, typ=typ)
@@ -51,36 +65,18 @@ def test_simple(typ, py, js):
assert act(js) == py
-def ambiguous(left, right):
- if left == str and right in {Dir, date, datetime}:
- return "str prevents {} matching".format(right)
- if left == date and right == datetime:
- return "supertype date prevents subtype datetime matching"
- if left == datetime and right == date:
- return "dates in iso format are valid datetimes"
- if left == Dict[str, float] and right == Point:
- # Note that this is the case where the attrs class has homogenous fields
- return "dict prevents attrs class matching"
- ambiguous = {List[Point], Tuple[Point, ...], Set[Point], FrozenSet[Point]}
- if left in ambiguous and right in ambiguous:
- return "collections are all represented as json arrays"
- return
+def _pairs():
+ for i in range(0, len(cats)):
+ lefts = cats[i]
+ rights = cats[(i + 2) % len(cats)]
+ yield from product(lefts, rights)
def cvt_map():
- for left, right in product(cases, cases):
- if left is right:
- continue
+ for left, right in _pairs():
left_type, left_python, left_json = left
right_type, right_python, right_json = right
- if (
- left_json == right_json
- or left_python == right_python
- or ambiguous(left_type, right_type)
- ):
- continue
-
typ = Union[left_type, right_type]
yield (PY2JSON, typ, left_python, left_json)
yield (PY2JSON, typ, right_python, right_json)
@@ -98,19 +94,10 @@ def test_convert_unions(verb, typ, subj, expect):
def check_map():
- for left, right in product(cases, cases):
- if left is right:
- continue
+ for left, right in _pairs():
left_type, left_python, left_json = left
right_type, right_python, right_json = right
- if (
- left_json == right_json
- or left_python == right_python
- or ambiguous(left_type, right_type)
- ):
- continue
-
typ = Union[left_type, right_type]
yield (INSP_PY, typ, left_python)
yield (INSP_PY, typ, right_python)
diff --git a/tests/test_union_prop.py b/tests/test_union_prop.py
new file mode 100644
index 0000000..ad99fb4
--- /dev/null
+++ b/tests/test_union_prop.py
@@ -0,0 +1,53 @@
+import pytest
+from hypothesis import given, settings, HealthCheck, reproduce_failure
+
+from . import type_strategies as ts
+
+import attr
+from datetime import date, datetime
+from decimal import Decimal
+from enum import Enum
+from itertools import product
+from typing import Union, List, Tuple, Set, FrozenSet, Dict
+
+from json_syntax import std_ruleset
+from json_syntax.helpers import PY2JSON, JSON2PY, INSP_PY, INSP_JSON, NoneType
+from json_syntax.pattern import Matches
+
+
+@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
+@given(ts.type_value_pairs(ts.complex_no_unions))
+def test_roundtrip(pair):
+ typ, py_value = pair
+ rs = std_ruleset()
+ act = rs.lookup(verb=PY2JSON, typ=typ)
+ json_value = act(py_value)
+ act2 = rs.lookup(verb=JSON2PY, typ=typ)
+ rt_py_value = act2(json_value)
+ assert py_value == rt_py_value
+
+
+@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
+@given(ts.type_value_pairs(ts.unions_of_simple))
+def test_roundtrip_union_simple(pair):
+ typ, py_value = pair
+ rs = std_ruleset()
+ act = rs.lookup(verb=PY2JSON, typ=typ)
+ json_value = act(py_value)
+ act2 = rs.lookup(verb=JSON2PY, typ=typ)
+ rt_py_value = act2(json_value)
+ if not rs.is_ambiguous(typ=typ, threshold=Matches.sometimes):
+ assert py_value == rt_py_value
+
+
+@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
+@given(ts.type_value_pairs(ts.complex_anything))
+def test_roundtrip_arbitrary_complex(pair):
+ typ, py_value = pair
+ rs = std_ruleset()
+ act = rs.lookup(verb=PY2JSON, typ=typ)
+ json_value = act(py_value)
+ act2 = rs.lookup(verb=JSON2PY, typ=typ)
+ rt_py_value = act2(json_value)
+ if not rs.is_ambiguous(typ=typ, threshold=Matches.sometimes):
+ assert py_value == rt_py_value
diff --git a/tests/type_strategies.py b/tests/type_strategies.py
new file mode 100644
index 0000000..84dcdf8
--- /dev/null
+++ b/tests/type_strategies.py
@@ -0,0 +1,145 @@
+from hypothesis import strategies as st
+
+from decimal import Decimal
+import datetime as dt
+from enum import Enum
+
+from . import _strategies as _st
+
+
+# Tests often want to compare for equality, and there's no good way to do this with NaNs breaking it. :-(
+st.register_type_strategy(Decimal, st.decimals(allow_nan=False))
+st.register_type_strategy(float, st.floats(allow_nan=False))
+
+
+def type_value_pairs(base):
+ @st.composite
+ def tv_pairs(draw):
+ typ = draw(base)
+ try:
+ val = draw(st.from_type(typ))
+ except Exception as exc:
+ exc.args += (typ,)
+ raise
+ return (typ, val)
+
+ return tv_pairs()
+
+
+atoms = st.sampled_from(
+ [
+ type(None),
+ bool,
+ int,
+ float,
+ Decimal,
+ str,
+ dt.date,
+ dt.datetime,
+ dt.time,
+ dt.timedelta,
+ ]
+)
+
+
+class Head(Enum):
+ def __init__(self, disposition):
+ self.disposition = disposition
+ self.atomic = disposition == "atom"
+ self.hashable = disposition in ("atom", "immut")
+ self.is_union = disposition == "union"
+
+ atoms = "atom"
+ enums = "atom"
+ lists = "mut"
+ sets = "mut"
+ dicts = "mut"
+ mut_attrs = "mut"
+ mut_dataclasses = "mut"
+ hmg_tuples = "immut"
+ frozensets = "immut"
+ prod_tuples = "immut"
+ frz_attrs = "immut"
+ frz_dataclasses = "immut"
+ namedtuples = "immut"
+ unions = "union"
+
+ @classmethod
+ def short(cls, elems):
+ if isinstance(elems, (cls, str)):
+ elems = [elems]
+ out = set()
+ for elem in elems:
+ if isinstance(elem, cls):
+ out.add(elem)
+ elif isinstance(elem, str):
+ out.update(head for head in cls if head.disposition == elem)
+ return out
+
+
+# Need to add:
+# 1. default values to all of these
+# 2. typeless variants
+# 3. our own subclasses?
+
+
+def map_heads(types, frz_types):
+ H = Head
+ yield H.atoms, atoms
+ yield H.enums, _st.enums
+ if types:
+ yield H.lists, _st.lists(types)
+ yield H.unions, _st.unions(types)
+ yield H.mut_attrs, _st.attrs(types, frozen=False)
+ yield H.mut_dataclasses, _st.dataclasses(types, frozen=False)
+ yield H.dicts, _st.dicts(types)
+ if frz_types:
+ yield H.hmg_tuples, _st.hmg_tuples(frz_types)
+ yield H.sets, _st.sets(frz_types)
+ yield H.frozensets, _st.frozensets(frz_types)
+ yield H.prod_tuples, _st.prod_tuples(frz_types)
+ yield H.frz_attrs, _st.attrs(frz_types, frozen=True)
+ yield H.frz_dataclasses, _st.dataclasses(frz_types, frozen=True)
+ yield H.namedtuples, _st.namedtuples(frz_types)
+
+
+def type_tree(*levels):
+ """
+ Constructs a type tree of a fixed maximum height based on the heads provided.
+ The last level must be leaves that can be contained by the levels above.
+ """
+ types, frz_types = None, None
+
+ for level in map(Head.short, reversed(levels)):
+ tt = []
+ frz_tt = []
+ for head, typ in map_heads(types, frz_types):
+ if typ is None:
+ continue
+ if head in level:
+ tt.append(typ)
+ if head.hashable:
+ frz_tt.append(typ)
+ types = st.one_of(tt) if tt else None
+ frz_types = st.one_of(frz_tt) if frz_tt else None
+
+ if types is None:
+ raise ValueError("No types for {}".format(levels))
+ return types
+
+
+complex_no_unions = type_tree(
+ {"atom", "mut", "immut"},
+ {"atom", "mut", "immut"},
+ {"atom", "mut", "immut"},
+ {"atom"},
+)
+
+unions_of_simple = type_tree({Head.unions}, {"atom", "mut", "immut"}, {"atom"})
+
+complex_anything = type_tree(
+ {"atom", "mut", "immut", "unions"},
+ {"atom", "mut", "immut", "unions"},
+ {"atom", "mut", "immut", "unions"},
+ {"atom"},
+)