diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4dd5c5d..ca67453 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,12 @@ CHANGELOG ========= +3.9.1.0 +=============== + +* Python 3.9 support + + 0.27.2 =============== diff --git a/README.rst b/README.rst index a9e5ba6..1b23a05 100644 --- a/README.rst +++ b/README.rst @@ -1,7 +1,7 @@ .. _badges: .. image:: https://github.com/avanov/typeit/workflows/CI/badge.svg?branch=develop - :target: https://github.com/avanov/typeit/actions?query=workflow%3A%22CI%22 + :target: https://github.com/avanov/typeit/actions?query=branch%3Adevelop .. image:: https://coveralls.io/repos/github/avanov/typeit/badge.svg?branch=develop :target: https://coveralls.io/github/avanov/typeit?branch=develop @@ -23,7 +23,8 @@ Typeit ------ **typeit** infers Python types from a sample JSON/YAML data, and provides you with the tools -for serialising and parsing it. It works superb on Python 3.7 and above. +for serialising and parsing it. It also provides you with smart constructors for arbitrarily nested data structures. +The library works superbly on Python 3.7 and above. Start using it by generating types for your JSON payloads: diff --git a/docs/conf.py b/docs/conf.py index db966a9..73f7a5c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,9 +51,9 @@ # built documents. # # The short X.Y version. -version = '0.27' +version = '3.9' # The full version, including alpha/beta/rc tags. -release = '0.27.2' +release = '3.9.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/setup.py b/setup.py index 44a98c0..e6e0b2f 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ def requirements(at_path: Path): # ---------------------------- setup(name='typeit', - version='0.27.2', + version='3.9.1.0', description='typeit brings typed data into your project', long_description=README, classifiers=[ diff --git a/tests/parser/test_codegen.py b/tests/parser/test_codegen.py index 749faa6..3123428 100644 --- a/tests/parser/test_codegen.py +++ b/tests/parser/test_codegen.py @@ -28,7 +28,6 @@ def test_typeit(data): cg.typeit(data) - def test_parse_structure_with_sequences(): data = """{"x": [[{"y": "z"}]]}""" github_pr_dict = json.loads(data) diff --git a/typeit/codegen/__init__.py b/typeit/codegen/__init__.py index 7951a40..d179ab5 100644 --- a/typeit/codegen/__init__.py +++ b/typeit/codegen/__init__.py @@ -219,6 +219,9 @@ def codegen_py(typeit_schema: TypeitSchema, return NEW_LINE.join(generated_definitions), overrides_source +SEQUENCE_ORIGINS = {insp.get_origin(List[Any]), insp.get_origin(Sequence[Any])} + + def literal_for_type(typ: Type[iface.IType]) -> str: # typ is either one of these: # * builtin type @@ -227,7 +230,7 @@ def literal_for_type(typ: Type[iface.IType]) -> str: try: return BUILTIN_LITERALS_FOR_TYPES[typ](typ) except KeyError: - if typ.__class__ in {List.__class__, Sequence.__class__}: # type: ignore + if insp.get_origin(typ) in SEQUENCE_ORIGINS: # type: ignore sub_type = literal_for_type(typ.__args__[0]) # TODO: Sequence/List/PVector flag-based return f'Sequence[{sub_type}]' diff --git a/typeit/parser/__init__.py b/typeit/parser/__init__.py index ce5c92b..6cd5848 100644 --- a/typeit/parser/__init__.py +++ b/typeit/parser/__init__.py @@ -120,7 +120,7 @@ def _maybe_node_for_union( case of Optional[Any], which is in essence Union[None, T] where T is either unknown Any or a concrete type. 
""" - if typ in supported_type or insp.get_origin(typ) in supported_origin: + if typ in supported_type or get_origin_39(typ) in supported_origin: variants = inner_type_boundaries(typ) if variants in ((NoneType, Any), (Any, NoneType)): # Case for Optional[Any] and Union[None, Any] notations @@ -211,7 +211,7 @@ def _maybe_node_for_literal( types: https://mypy.readthedocs.io/en/latest/literal_types.html """ if typ in supported_type \ - or insp.get_origin(typ) in supported_origin: + or get_origin_39(typ) in supported_origin: inner = inner_type_boundaries(typ) for x in inner: if type(x) not in _supported_literal_types: @@ -238,12 +238,16 @@ def _maybe_node_for_list( }) ) -> Tuple[Optional[schema.nodes.SequenceSchema], MemoType]: # typ is List[T] where T is either unknown Any or a concrete type - if typ in supported_type or insp.get_origin(typ) in supported_origin: + if typ in supported_type or get_origin_39(typ) in supported_origin: try: inner = inner_type_boundaries(typ)[0] except IndexError: - inner = Any - if pyt.PVector in (typ, insp.get_origin(typ)): + # Python 3.9 access to arguments + try: + inner = typ.__args__[0] + except (AttributeError, IndexError): + inner = Any + if pyt.PVector in (typ, get_origin_39(typ)): seq_type = schema.nodes.PVectorSchema else: seq_type = schema.nodes.SequenceSchema @@ -252,6 +256,13 @@ def _maybe_node_for_list( return None, memo +def get_origin_39(typ): + """python3.9 aware origin""" + origin = insp.get_origin(typ) + if origin is None: + origin = typ.__origin__ if hasattr(typ, '__origin__') else None + return origin + def _maybe_node_for_set( typ: Type[iface.IType], overrides: OverridesT, @@ -272,7 +283,7 @@ def _maybe_node_for_set( frozenset, }) ) -> Optional[schema.nodes.SequenceSchema]: - origin = insp.get_origin(typ) + origin = get_origin_39(typ) if typ in supported_type or origin in supported_origin: try: inner = inner_type_boundaries(typ)[0] @@ -301,7 +312,7 @@ def _maybe_node_for_tuple( tuple, Tuple, }) ) -> 
Tuple[Optional[schema.nodes.TupleSchema], MemoType]: - if typ in supported_type or insp.get_origin(typ) in supported_origin: + if typ in supported_type or get_origin_39(typ) in supported_origin: inner_types = inner_type_boundaries(typ) if Ellipsis in inner_types: raise TypeError( @@ -321,11 +332,30 @@ return None, memo +def are_generic_bases_match(bases, template): + for base in bases: + if base in template: + return True + return False + + +def is_pmap(typ): + """python3.9 compatible pmap checker""" + return pyt.PMap in (typ, get_origin_39(typ)) \ + or (hasattr(typ, '__name__') and typ.__name__ == "PMap" and typ.__module__.startswith("pyrsistent.")) + +def is_39_deprecated_dict(typ): + """python3.9 deprecated Dict in favor of dict, and now it lacks necessary metadata other than name and module if + there are no other constraints on key and value types, e.g. Dict[Key, Val] can be recognised, however just Dict cannot be. + """ + return get_origin_39(typ) is None and hasattr(typ, '_name') and typ._name == 'Dict' and typ.__module__ == 'typing' + def _maybe_node_for_dict( typ: Type[iface.IType], overrides: OverridesT, memo: MemoType, supported_type=frozenset({ + dict, collections.abc.Mapping, pyt.PMap, }), @@ -342,17 +372,27 @@ (for instance, python logging settings that have an infinite set of possible attributes). 
""" - if typ in supported_type or insp.get_origin(typ) in supported_origin: - if pyt.PMap in (typ, insp.get_origin(typ)): - schema_node_type = schema.nodes.PMapSchema + # This is a hack for Python 3.9 + if insp.is_generic_type(typ): + generic_bases = [get_origin_39(x) for x in insp.get_generic_bases(typ)] + else: + generic_bases = [] + + typ = dict if is_39_deprecated_dict(typ) else typ + + if typ in supported_type or get_origin_39(typ) in supported_origin or are_generic_bases_match(generic_bases, supported_origin): + schema_node_type = schema.nodes.PMapSchema if is_pmap(typ) else schema.nodes.SchemaNode + + if generic_bases: + # python 3.9 args + key_type, value_type = typ.__args__ else: - schema_node_type = schema.nodes.SchemaNode + try: + key_type, value_type = insp.get_args(typ) + except ValueError: + # Mapping doesn't provide key/value types + key_type, value_type = Any, Any - try: - key_type, value_type = insp.get_args(typ) - except ValueError: - # Mapping doesn't provide key/value types - key_type, value_type = Any, Any key_node, memo = decide_node_type(key_type, overrides, memo) value_node, memo = decide_node_type(value_type, overrides, memo) mapping_type = schema.types.TypedMapping(key_node=key_node, value_node=value_node) @@ -373,13 +413,14 @@ def _maybe_node_for_user_type( ) -> Tuple[Optional[schema.nodes.SchemaNode], MemoType]: """ Generates a Colander schema for the given user-defined `typ` that is capable of both constructing (deserializing) and serializing the `typ`. + This includes named tuples and dataclasses. """ global_name_overrider = get_global_name_overrider(overrides) is_generic = insp.is_generic_type(typ) if is_generic: # get the base class that was turned into Generic[T, ...] - hints_source = insp.get_origin(typ) + hints_source = get_origin_39(typ) # now we need to map generic type variables to the bound class types, # e.g. we map Generic[T,U,V, ...] to actual types of MyClass[int, float, str, ...] 
generic_repr = insp.get_generic_bases(hints_source)