From d3c02a45b52bb7d955e81d09906dfcb522fdc223 Mon Sep 17 00:00:00 2001 From: Samuel Colvin Date: Sun, 26 Mar 2023 22:31:36 +0100 Subject: [PATCH] Refector docs (#5270) * moving examples into docs * examples passing! * fix for older python * fix a few more tests * fix docs dependencies * xfail some discriminated union tests * more skipping * make tests run * switch to mkdocs-simple-hooks for build * using pyupgrade on examples * fix devtools * no mypy on plugins * run isort on examples * fix devtools newline * uprev deps * fix test_docs, add docs --verbose build * fix more build errors * fixing ci (i hope) * another test-requires --- .github/workflows/ci.yml | 23 +- .gitignore | 7 +- .mypy-configs/fast.toml | 6 +- .mypy-configs/full.toml | 4 + Makefile | 13 +- docs/.benchmarks_table.md | 13 - docs/blog/pydantic-v2.md | 26 +- docs/build/exec_examples.py | 428 ----- docs/build/main.py | 31 - docs/build/schema_mapping.py | 565 ------- docs/changelog.md | 1 - docs/datamodel_code_generator.md | 24 +- docs/examples/.editorconfig | 6 - docs/examples/annotated_types_named_tuple.py | 20 - docs/examples/annotated_types_typed_dict.py | 45 - .../dataclasses_arbitrary_types_allowed.py | 42 - docs/examples/dataclasses_config.py | 26 - docs/examples/dataclasses_default_schema.py | 21 - docs/examples/dataclasses_initvars.py | 23 - docs/examples/dataclasses_json_dumps.py | 17 - docs/examples/dataclasses_main.py | 13 - docs/examples/dataclasses_nested.py | 16 - .../dataclasses_post_init_post_parse.py | 22 - .../dataclasses_stdlib_inheritance.py | 27 - .../dataclasses_stdlib_run_validation.py | 30 - .../dataclasses_stdlib_to_pydantic.py | 47 - .../dataclasses_stdlib_with_basemodel.py | 39 - docs/examples/devtools_main.py | 33 - docs/examples/exporting_models_copy.py | 22 - docs/examples/exporting_models_dict.py | 19 - docs/examples/exporting_models_exclude1.py | 32 - docs/examples/exporting_models_exclude2.py | 73 - docs/examples/exporting_models_exclude3.py | 29 - docs/examples/exporting_models_exclude4.py | 26 - docs/examples/exporting_models_exclude5.py | 26 - docs/examples/exporting_models_iterate.py | 18 - docs/examples/exporting_models_json.py | 15 - .../exporting_models_json_encoders.py | 18 - .../exporting_models_json_encoders_merge.py | 24 - .../exporting_models_json_forward_ref.py | 34 - .../exporting_models_json_subclass.py | 27 - docs/examples/exporting_models_orjson.py | 22 - docs/examples/exporting_models_pickle.py | 15 - docs/examples/exporting_models_ujson.py | 16 - docs/examples/generate_models_person_model.py | 20 - .../hypothesis_property_based_test.py | 24 - docs/examples/index_error.py | 11 - docs/examples/index_main.py | 22 - docs/examples/model_config_alias_generator.py | 18 - .../examples/model_config_alias_precedence.py | 22 - .../model_config_change_globally_custom.py | 14 - docs/examples/model_config_class_kwargs.py | 11 - docs/examples/model_config_dataclass.py | 23 - docs/examples/model_config_main.py | 12 - docs/examples/model_config_smart_union_off.py | 19 - docs/examples/model_config_smart_union_on.py | 22 - .../model_config_smart_union_on_edge_case.py | 14 - docs/examples/models_abc.py | 11 - docs/examples/models_construct.py | 29 - docs/examples/models_custom_root_access.py | 17 - docs/examples/models_custom_root_field.py | 16 - .../models_custom_root_field_parse_obj.py | 21 - docs/examples/models_data_conversion.py | 10 - docs/examples/models_default_factory.py | 14 - docs/examples/models_dynamic_creation.py | 8 - docs/examples/models_dynamic_inheritance.py | 16 - docs/examples/models_dynamic_validators.py | 26 - docs/examples/models_errors1.py | 33 - docs/examples/models_errors2.py | 18 - docs/examples/models_errors3.py | 22 - docs/examples/models_field_order.py | 20 - docs/examples/models_from_typeddict.py | 21 - docs/examples/models_generics.py | 42 - docs/examples/models_generics_inheritance.py | 17 - .../models_generics_inheritance_extend.py | 19 - docs/examples/models_generics_naming.py | 17 - docs/examples/models_generics_nested.py | 23 - docs/examples/models_generics_typevars.py | 25 - docs/examples/models_mutation.py | 22 - docs/examples/models_orm_mode.py | 36 - docs/examples/models_orm_mode_data_binding.py | 43 - docs/examples/models_orm_mode_recursive.py | 39 - .../examples/models_orm_mode_reserved_name.py | 30 - docs/examples/models_parse.py | 42 - docs/examples/models_recursive.py | 22 - .../models_required_field_optional.py | 15 - docs/examples/models_required_fields.py | 7 - docs/examples/models_signature.py | 12 - docs/examples/models_signature_custom_init.py | 15 - .../models_structural_pattern_matching.py | 18 - docs/examples/mypy_main.py | 17 - docs/examples/parse_obj_as.py | 16 - docs/examples/postponed_annotations_broken.py | 23 - .../postponed_annotations_forward_ref.py | 15 - docs/examples/postponed_annotations_main.py | 11 - ...nnotations_self_referencing_annotations.py | 12 - ...ned_annotations_self_referencing_string.py | 11 - docs/examples/postponed_annotations_works.py | 13 - docs/examples/private_attributes.py | 19 - ...attributes_underscore_attrs_are_private.py | 16 - docs/examples/ruff.toml | 9 - docs/examples/schema_ad_hoc.py | 20 - docs/examples/schema_annotated.py | 9 - docs/examples/schema_custom.py | 17 - docs/examples/schema_extra_callable.py | 19 - docs/examples/schema_main.py | 38 - docs/examples/schema_top_level.py | 20 - .../examples/schema_unenforced_constraints.py | 30 - docs/examples/schema_with_example.py | 22 - docs/examples/schema_with_field.py | 33 - docs/examples/types_arbitrary_allowed.py | 37 - docs/examples/types_bare_type.py | 20 - docs/examples/types_boolean.py | 13 - docs/examples/types_bytesize.py | 15 - docs/examples/types_callable.py | 10 - docs/examples/types_choices.py | 26 - docs/examples/types_color.py | 21 - docs/examples/types_constrained.py | 59 - docs/examples/types_custom_type.py | 68 - docs/examples/types_dt.py | 19 - docs/examples/types_generics.py | 83 - .../types_import_string_serialization.py | 42 - docs/examples/types_import_string_usage.py | 40 - docs/examples/types_infinite_generator.py | 22 - ...types_infinite_generator_validate_first.py | 46 - docs/examples/types_iterables.py | 44 - docs/examples/types_json_type.py | 29 - docs/examples/types_literal1.py | 15 - docs/examples/types_literal2.py | 27 - docs/examples/types_literal3.py | 32 - docs/examples/types_payment_card_number.py | 30 - docs/examples/types_secret_types.py | 50 - docs/examples/types_strict.py | 53 - docs/examples/types_type.py | 28 - docs/examples/types_typevar.py | 18 - docs/examples/types_undefined_warning.py | 45 - docs/examples/types_union_correct.py | 15 - docs/examples/types_union_discriminated.py | 30 - .../types_union_discriminated_nested.py | 50 - docs/examples/types_union_incorrect.py | 21 - docs/examples/types_url_properties.py | 33 - docs/examples/types_url_punycode.py | 16 - docs/examples/types_urls.py | 19 - docs/examples/validation_decorator_async.py | 32 - docs/examples/validation_decorator_config.py | 26 - docs/examples/validation_decorator_field.py | 22 - .../validation_decorator_field_alias.py | 10 - docs/examples/validation_decorator_main.py | 19 - .../validation_decorator_parameter_types.py | 68 - .../validation_decorator_raw_function.py | 14 - docs/examples/validation_decorator_types.py | 21 - .../examples/validation_decorator_validate.py | 17 - docs/examples/validators_allow_reuse.py | 25 - docs/examples/validators_always.py | 15 - docs/examples/validators_dataclass.py | 17 - docs/examples/validators_pre_item.py | 46 - docs/examples/validators_root.py | 36 - docs/examples/validators_simple.py | 44 - .../examples/validators_subclass_each_item.py | 36 - docs/hypothesis_plugin.md | 30 +- docs/index.md | 74 +- docs/mypy_plugin.md | 21 +- docs/plugins/devtools_output.html | 22 + docs/plugins/main.py | 214 +++ docs/plugins/schema_mappings.toml | 590 +++++++ docs/usage/dataclasses.md | 307 +++- docs/usage/devtools.md | 38 +- docs/usage/exporting_models.md | 426 ++++- docs/usage/model_config.md | 199 ++- docs/usage/models.md | 1035 +++++++++++- docs/usage/mypy.md | 20 +- docs/usage/postponed_annotations.md | 116 +- docs/usage/schema.md | 405 ++++- docs/usage/types.md | 1443 ++++++++++++++++- docs/usage/validation_decorator.md | 320 +++- docs/usage/validators.md | 261 ++- docs/visual_studio_code.md | 14 +- mkdocs.yml | 13 +- pydantic/_internal/_generate_schema.py | 1 + requirements/docs.in | 8 +- requirements/docs.txt | 30 +- requirements/linting.txt | 6 +- requirements/pyproject-all.txt | 4 +- requirements/pyproject-min.txt | 4 +- requirements/testing-extra.in | 4 + requirements/testing-extra.txt | 10 +- requirements/testing.in | 1 + requirements/testing.txt | 24 +- tests/test_discriminated_union.py | 3 + tests/test_docs.py | 178 ++ tests/test_orm_mode.py | 4 - 191 files changed, 5637 insertions(+), 5024 deletions(-) delete mode 100644 docs/.benchmarks_table.md delete mode 100755 docs/build/exec_examples.py delete mode 100755 docs/build/main.py delete mode 100755 docs/build/schema_mapping.py delete mode 100644 docs/changelog.md delete mode 100644 docs/examples/.editorconfig delete mode 100644 docs/examples/annotated_types_named_tuple.py delete mode 100644 docs/examples/annotated_types_typed_dict.py delete mode 100644 docs/examples/dataclasses_arbitrary_types_allowed.py delete mode 100644 docs/examples/dataclasses_config.py delete mode 100644 docs/examples/dataclasses_default_schema.py delete mode 100644 docs/examples/dataclasses_initvars.py delete mode 100644 docs/examples/dataclasses_json_dumps.py delete mode 100644 docs/examples/dataclasses_main.py delete mode 100644 docs/examples/dataclasses_nested.py delete mode 100644 docs/examples/dataclasses_post_init_post_parse.py delete mode 100644 docs/examples/dataclasses_stdlib_inheritance.py delete mode 100644 docs/examples/dataclasses_stdlib_run_validation.py delete mode 100644 docs/examples/dataclasses_stdlib_to_pydantic.py delete mode 100644 docs/examples/dataclasses_stdlib_with_basemodel.py delete mode 100644 docs/examples/devtools_main.py delete mode 100644 docs/examples/exporting_models_copy.py delete mode 100644 docs/examples/exporting_models_dict.py delete mode 100644 docs/examples/exporting_models_exclude1.py delete mode 100644 docs/examples/exporting_models_exclude2.py delete mode 100644 docs/examples/exporting_models_exclude3.py delete mode 100644 docs/examples/exporting_models_exclude4.py delete mode 100644 docs/examples/exporting_models_exclude5.py delete mode 100644 docs/examples/exporting_models_iterate.py delete mode 100644 docs/examples/exporting_models_json.py delete mode 100644 docs/examples/exporting_models_json_encoders.py delete mode 100644 docs/examples/exporting_models_json_encoders_merge.py delete mode 100644 docs/examples/exporting_models_json_forward_ref.py delete mode 100644 docs/examples/exporting_models_json_subclass.py delete mode 100644 docs/examples/exporting_models_orjson.py delete mode 100644 docs/examples/exporting_models_pickle.py delete mode 100644 docs/examples/exporting_models_ujson.py delete mode 100644 docs/examples/generate_models_person_model.py delete mode 100644 docs/examples/hypothesis_property_based_test.py delete mode 100644 docs/examples/index_error.py delete mode 100644 docs/examples/index_main.py delete mode 100644 docs/examples/model_config_alias_generator.py delete mode 100644 docs/examples/model_config_alias_precedence.py delete mode 100644 docs/examples/model_config_change_globally_custom.py delete mode 100644 docs/examples/model_config_class_kwargs.py delete mode 100644 docs/examples/model_config_dataclass.py delete mode 100644 docs/examples/model_config_main.py delete mode 100644 docs/examples/model_config_smart_union_off.py delete mode 100644 docs/examples/model_config_smart_union_on.py delete mode 100644 docs/examples/model_config_smart_union_on_edge_case.py delete mode 100644 docs/examples/models_abc.py delete mode 100644 docs/examples/models_construct.py delete mode 100644 docs/examples/models_custom_root_access.py delete mode 100644 docs/examples/models_custom_root_field.py delete mode 100644 docs/examples/models_custom_root_field_parse_obj.py delete mode 100644 docs/examples/models_data_conversion.py delete mode 100644 docs/examples/models_default_factory.py delete mode 100644 docs/examples/models_dynamic_creation.py delete mode 100644 docs/examples/models_dynamic_inheritance.py delete mode 100644 docs/examples/models_dynamic_validators.py delete mode 100644 docs/examples/models_errors1.py delete mode 100644 docs/examples/models_errors2.py delete mode 100644 docs/examples/models_errors3.py delete mode 100644 docs/examples/models_field_order.py delete mode 100644 docs/examples/models_from_typeddict.py delete mode 100644 docs/examples/models_generics.py delete mode 100644 docs/examples/models_generics_inheritance.py delete mode 100644 docs/examples/models_generics_inheritance_extend.py delete mode 100644 docs/examples/models_generics_naming.py delete mode 100644 docs/examples/models_generics_nested.py delete mode 100644 docs/examples/models_generics_typevars.py delete mode 100644 docs/examples/models_mutation.py delete mode 100644 docs/examples/models_orm_mode.py delete mode 100644 docs/examples/models_orm_mode_data_binding.py delete mode 100644 docs/examples/models_orm_mode_recursive.py delete mode 100644 docs/examples/models_orm_mode_reserved_name.py delete mode 100644 docs/examples/models_parse.py delete mode 100644 docs/examples/models_recursive.py delete mode 100644 docs/examples/models_required_field_optional.py delete mode 100644 docs/examples/models_required_fields.py delete mode 100644 docs/examples/models_signature.py delete mode 100644 docs/examples/models_signature_custom_init.py delete mode 100644 docs/examples/models_structural_pattern_matching.py delete mode 100644 docs/examples/mypy_main.py delete mode 100644 docs/examples/parse_obj_as.py delete mode 100644 docs/examples/postponed_annotations_broken.py delete mode 100644 docs/examples/postponed_annotations_forward_ref.py delete mode 100644 docs/examples/postponed_annotations_main.py delete mode 100644 docs/examples/postponed_annotations_self_referencing_annotations.py delete mode 100644 docs/examples/postponed_annotations_self_referencing_string.py delete mode 100644 docs/examples/postponed_annotations_works.py delete mode 100644 docs/examples/private_attributes.py delete mode 100644 docs/examples/private_attributes_underscore_attrs_are_private.py delete mode 100644 docs/examples/ruff.toml delete mode 100644 docs/examples/schema_ad_hoc.py delete mode 100644 docs/examples/schema_annotated.py delete mode 100644 docs/examples/schema_custom.py delete mode 100644 docs/examples/schema_extra_callable.py delete mode 100644 docs/examples/schema_main.py delete mode 100644 docs/examples/schema_top_level.py delete mode 100644 docs/examples/schema_unenforced_constraints.py delete mode 100644 docs/examples/schema_with_example.py delete mode 100644 docs/examples/schema_with_field.py delete mode 100644 docs/examples/types_arbitrary_allowed.py delete mode 100644 docs/examples/types_bare_type.py delete mode 100644 docs/examples/types_boolean.py delete mode 100644 docs/examples/types_bytesize.py delete mode 100644 docs/examples/types_callable.py delete mode 100644 docs/examples/types_choices.py delete mode 100644 docs/examples/types_color.py delete mode 100644 docs/examples/types_constrained.py delete mode 100644 docs/examples/types_custom_type.py delete mode 100644 docs/examples/types_dt.py delete mode 100644 docs/examples/types_generics.py delete mode 100644 docs/examples/types_import_string_serialization.py delete mode 100644 docs/examples/types_import_string_usage.py delete mode 100644 docs/examples/types_infinite_generator.py delete mode 100644 docs/examples/types_infinite_generator_validate_first.py delete mode 100644 docs/examples/types_iterables.py delete mode 100644 docs/examples/types_json_type.py delete mode 100644 docs/examples/types_literal1.py delete mode 100644 docs/examples/types_literal2.py delete mode 100644 docs/examples/types_literal3.py delete mode 100644 docs/examples/types_payment_card_number.py delete mode 100644 docs/examples/types_secret_types.py delete mode 100644 docs/examples/types_strict.py delete mode 100644 docs/examples/types_type.py delete mode 100644 docs/examples/types_typevar.py delete mode 100644 docs/examples/types_undefined_warning.py delete mode 100644 docs/examples/types_union_correct.py delete mode 100644 docs/examples/types_union_discriminated.py delete mode 100644 docs/examples/types_union_discriminated_nested.py delete mode 100644 docs/examples/types_union_incorrect.py delete mode 100644 docs/examples/types_url_properties.py delete mode 100644 docs/examples/types_url_punycode.py delete mode 100644 docs/examples/types_urls.py delete mode 100644 docs/examples/validation_decorator_async.py delete mode 100644 docs/examples/validation_decorator_config.py delete mode 100644 docs/examples/validation_decorator_field.py delete mode 100644 docs/examples/validation_decorator_field_alias.py delete mode 100644 docs/examples/validation_decorator_main.py delete mode 100644 docs/examples/validation_decorator_parameter_types.py delete mode 100644 docs/examples/validation_decorator_raw_function.py delete mode 100644 docs/examples/validation_decorator_types.py delete mode 100644 docs/examples/validation_decorator_validate.py delete mode 100644 docs/examples/validators_allow_reuse.py delete mode 100644 docs/examples/validators_always.py delete mode 100644 docs/examples/validators_dataclass.py delete mode 100644 docs/examples/validators_pre_item.py delete mode 100644 docs/examples/validators_root.py delete mode 100644 docs/examples/validators_simple.py delete mode 100644 docs/examples/validators_subclass_each_item.py create mode 100644 docs/plugins/devtools_output.html create mode 100644 docs/plugins/main.py create mode 100644 docs/plugins/schema_mappings.toml create mode 100644 tests/test_docs.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 076f1ac9b3..46738a6950 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,28 +65,15 @@ jobs: steps: - uses: actions/checkout@v3 - - name: set up python - uses: actions/setup-python@v4 + - uses: actions/setup-python@v4 with: python-version: '3.10' - - uses: actions/cache@v3 - id: cache - with: - path: ${{ env.pythonLocation }} - key: > - docs-build-v2 - ${{ runner.os }} - ${{ env.pythonLocation }} - ${{ hashFiles('pyproject.toml') }} - ${{ hashFiles('requirements/*') }} - - name: install - if: steps.cache.outputs.cache-hit != 'true' - run: pip install -r requirements/all.txt . + run: pip install -r requirements/docs.txt - - name: build site - run: make docs + - run: python -c 'import docs.plugins.main' + - run: mkdocs build --verbose - name: Store docs site uses: actions/upload-artifact@v3 @@ -127,7 +114,7 @@ jobs: run: pip install . - name: test - run: pytest --ignore=tests/mypy/ --memray + run: pytest --ignore=tests/mypy/ --ignore=tests/test_docs.py --memray test: name: test ${{ matrix.os }} / ${{ matrix.python-version }} diff --git a/.gitignore b/.gitignore index d932d6b952..e192566c26 100644 --- a/.gitignore +++ b/.gitignore @@ -17,12 +17,7 @@ test.py .hypothesis /htmlcov/ /benchmarks/*.json -/docs/.changelog.md -/docs/.version.md -/docs/.tmp_schema_mappings.html -/docs/.tmp_examples/ -/docs/.tmp-projections/ -/docs/usage/.tmp-projections/ +/docs/changelog.md /site/ /site.zip .pytest_cache/ diff --git a/.mypy-configs/fast.toml b/.mypy-configs/fast.toml index 3192e35921..d56c94651d 100644 --- a/.mypy-configs/fast.toml +++ b/.mypy-configs/fast.toml @@ -30,7 +30,5 @@ module = [ ignore_missing_imports = true [[tool.mypy.overrides]] -module = [ - 'pydantic_core.*', -] -follow_imports = "skip" +module = ['pydantic_core.*', 'devtools.*'] +follow_imports = 'skip' diff --git a/.mypy-configs/full.toml b/.mypy-configs/full.toml index 9a86f72787..5d98e324ad 100644 --- a/.mypy-configs/full.toml +++ b/.mypy-configs/full.toml @@ -28,3 +28,7 @@ module = [ 'devtools.*', ] ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = ['devtools.*'] +follow_imports = 'skip' diff --git a/Makefile b/Makefile index 4b13dca2e3..5f65977b08 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ .DEFAULT_GOAL := all -sources = pydantic tests docs/build +sources = pydantic tests docs/plugins .PHONY: install install: @@ -31,11 +31,11 @@ lint: .PHONY: typecheck typecheck: - mypy pydantic docs/build --disable-recursive-aliases --config-file .mypy-configs/full.toml + mypy pydantic --disable-recursive-aliases --config-file .mypy-configs/full.toml .PHONY: typecheck-fast typecheck-fast: - mypy pydantic docs/build --disable-recursive-aliases --config-file .mypy-configs/fast.toml + mypy pydantic --disable-recursive-aliases --config-file .mypy-configs/fast.toml .PHONY: test-mypy test-mypy: @@ -96,11 +96,4 @@ clean: .PHONY: docs docs: - ruff docs/examples/ - python docs/build/main.py mkdocs build - -.PHONY: docs-serve -docs-serve: - python docs/build/main.py - mkdocs serve diff --git a/docs/.benchmarks_table.md b/docs/.benchmarks_table.md deleted file mode 100644 index 49694101df..0000000000 --- a/docs/.benchmarks_table.md +++ /dev/null @@ -1,13 +0,0 @@ -[//]: <> (Generated with benchmarks/run.py, DO NOT EDIT THIS FILE DIRECTLY, instead run `SAVE=1 python ./run.py`.) - -Package | Version | Relative Performance | Mean validation time ---- | --- | --- | --- -pydantic | `1.7.3` | | 93.7μs -attrs + cattrs | `20.3.0` | 1.5x slower | 143.6μs -valideer | `0.4.2` | 1.9x slower | 175.9μs -marshmallow | `3.10.0` | 2.4x slower | 227.6μs -voluptuous | `0.12.1` | 2.7x slower | 257.5μs -trafaret | `2.1.0` | 3.2x slower | 296.7μs -schematics | `2.1.0` | 10.2x slower | 955.5μs -django-rest-framework | `3.12.2` | 12.3x slower | 1148.4μs -cerberus | `1.3.2` | 25.9x slower | 2427.6μs diff --git a/docs/blog/pydantic-v2.md b/docs/blog/pydantic-v2.md index f2c3e64292..c736197132 100644 --- a/docs/blog/pydantic-v2.md +++ b/docs/blog/pydantic-v2.md @@ -241,9 +241,10 @@ my misgivings about marking a field as `Optional[int]` but requiring a value to In pydantic V2, pydantic will move to match dataclasses, thus: -```py title="Required vs. Nullable" +```py title="Required vs. Nullable" test="skip" lint="skip" upgrade="skip" from pydantic import BaseModel + class Foo(BaseModel): f1: str # required, cannot be None f2: str | None # required, can be None - same as Optional[str] / Union[str, None] @@ -268,10 +269,11 @@ Fields which use a function for validation can be any of the following types: An example how a wrap validator might look: -```py title="Wrap mode validator function" +```py title="Wrap mode validator function" test="skip" lint="skip" upgrade="skip" from datetime import datetime from pydantic import BaseModel, ValidationError, validator + class MyModel(BaseModel): timestamp: datetime @@ -297,7 +299,7 @@ pydantic-core can support alias "paths" as well as simple string aliases to flat Best demonstrated with an example: -```py title="Alias paths" +```py title="Alias paths" test="skip" lint="skip" upgrade="skip" from pydantic import BaseModel, Field @@ -360,9 +362,10 @@ See [pydantic#1549](https://github.com/pydantic/pydantic/issues/1549) for motiva Here's an example of `context` might be used: -```py title="Context during Validation" +```py title="Context during Validation" test="skip" lint="skip" upgrade="skip" from pydantic import BaseModel, EmailStr, validator + class User(BaseModel): email: EmailStr home_country: str @@ -373,6 +376,7 @@ class User(BaseModel): raise ValueError('invalid country choice') return v + async def add_user(post_data: bytes): countries = set(await db_connection.fetch_all('select code from country')) user = User.model_validate_json(post_data, context={'countries': countries}) @@ -405,7 +409,7 @@ All methods on models will start with `model_`, fields' names will not be allowe This will mean `BaseModel` will have roughly the following signature. -```{.py .annotate title="New BaseModel methods"} +```{.py .annotate title="New BaseModel methods" test="skip" lint="skip" upgrade="skip"} class BaseModel: model_fields: List[FieldInfo] """previously `__fields__`, although the format will change a lot""" @@ -537,7 +541,7 @@ docs. Thus, errors might look like: -```py title="Line Errors Example" +```py title="Line Errors Example" test="skip" lint="skip" upgrade="skip" [ { 'kind': 'greater_than_equal', @@ -609,9 +613,10 @@ an error or returning the validation result. To be clear, this isn't a real `isinstance` call, rather it is equivalent to -```py title="is_instance" +```py title="is_instance" test="skip" lint="skip" upgrade="skip" class BaseModel: ... + @classmethod def model_is_instance(cls, data: Any) -> bool: try: @@ -648,9 +653,10 @@ item can be a string, if so a function of that name will be taken from the class Here's an example of how a custom field type could be defined: -```py title="New custom field types" +```py title="New custom field types" test="skip" lint="skip" upgrade="skip" from pydantic import ValidationSchema + class Foobar: def __init__(self, value: str): self.value = value @@ -659,7 +665,7 @@ class Foobar: 'type': 'function', 'mode': 'after', 'function': 'validate', - 'schema': {'type': 'str'} + 'schema': {'type': 'str'}, } @classmethod @@ -790,7 +796,7 @@ We will endeavour to read and respond to everyone. At the center of pydantic v2 will be a `PydanticValidator` class which looks roughly like this (note: this is just pseudo-code, it's not even valid python and is only supposed to be used to demonstrate the idea): -```py title="PydanticValidator" +```py title="PydanticValidator" test="skip" lint="skip" upgrade="skip" # type identifying data which has been validated, # as per pydantic-core, this can include "fields_set" data ValidData = ... diff --git a/docs/build/exec_examples.py b/docs/build/exec_examples.py deleted file mode 100755 index e0f1ed8339..0000000000 --- a/docs/build/exec_examples.py +++ /dev/null @@ -1,428 +0,0 @@ -#!/usr/bin/env python3 -from __future__ import annotations - -import importlib -import json -import os -import re -import shutil -import subprocess -import sys -import textwrap -import traceback -from pathlib import Path -from typing import Any, Callable -from unittest.mock import patch - -from ansi2html import Ansi2HTMLConverter -from devtools import PrettyFormat - -THIS_DIR = Path(__file__).parent -DOCS_DIR = (THIS_DIR / '..').resolve() -EXAMPLES_DIR = DOCS_DIR / 'examples' -TMP_EXAMPLES_DIR = DOCS_DIR / '.tmp_examples' -UPGRADED_TMP_EXAMPLES_DIR = TMP_EXAMPLES_DIR / 'upgraded' - -MAX_LINE_LENGTH = int( - re.search(r'max_line_length = (\d+)', (EXAMPLES_DIR / '.editorconfig').read_text()).group(1) # type: ignore -) -LONG_LINE = 50 -LOWEST_VERSION = (3, 7) -HIGHEST_VERSION = (3, 10) -pformat = PrettyFormat(simple_cutoff=LONG_LINE) -Error = Callable[..., None] -Version = tuple[int, int] - -PYTHON_CODE_MD_TMPL = """ -=== "Python {version} and above" - - ```py -{code} - ``` -""".strip() -JSON_OUTPUT_MD_TMPL = """ - -Outputs: -```json -{output} -``` -""" - - -def to_string(value: Any) -> str: - # attempt to build a pretty equivalent of the print output - if isinstance(value, (dict, list, tuple, set)): - return pformat(value) - elif isinstance(value, str) and any(re.fullmatch(r, value, flags=re.DOTALL) for r in ['{".+}', r'\[.+\]']): - try: - obj = json.loads(value) - except ValueError: - # not JSON, not a problem - pass - else: - s = json.dumps(obj) - if len(s) > LONG_LINE: - json.dumps(obj, indent=2) - else: - return s - - return str(value) - - -class MockPrint: - def __init__(self, file: Path) -> None: - self.file = file - self.statements: list[tuple[int, str]] = [] - - def __call__(self, *args: Any, sep: str = ' ', **kwargs: Any) -> None: - frame = sys._getframe(4) if sys.version_info >= (3, 8) else sys._getframe(3) - - if not self.file.samefile(frame.f_code.co_filename): - # happens when index_error.py imports index_main.py - return - s = sep.join(map(to_string, args)) - - self.statements.append((frame.f_lineno, s)) - - -class MockPath: - def read_text(self, *args: Any, **kwargs: Any) -> str: - return '{"foobar": "spam"}' - - -def build_print_lines(s: str, max_len_reduction: int = 0) -> list[str]: - print_lines = [] - max_len = MAX_LINE_LENGTH - 3 - max_len_reduction - for line in s.split('\n'): - if len(line) > max_len: - print_lines += textwrap.wrap(line, width=max_len) - else: - print_lines.append(line) - return print_lines - - -def build_print_statement(line_no: int, s: str, lines: list[str]) -> None: - indent = '' - for back in range(1, 100): - m = re.search(r'^( *)print\(', lines[line_no - back]) - if m: - indent = m.group(1) - break - print_lines = build_print_lines(s, len(indent)) - - if len(print_lines) > 2: - text = textwrap.indent('"""\n{}\n"""'.format('\n'.join(print_lines)), indent) - else: - text = '\n'.join(f'{indent}#> {line}' for line in print_lines) - lines.insert(line_no, text) - - -def all_md_contents() -> str: - file_contents = [] - for f in DOCS_DIR.glob('**/*.md'): - file_contents.append(f.read_text()) - return '\n\n\n'.join(file_contents) - - -def gen_ansi_output() -> None: - - conv = Ansi2HTMLConverter() - - input_file = EXAMPLES_DIR / 'devtools_main.py' - os.environ['PY_DEVTOOLS_HIGHLIGHT'] = 'true' - p = subprocess.run((sys.executable, str(input_file)), stdout=subprocess.PIPE, check=True, encoding='utf8') - html = conv.convert(p.stdout, full=False).strip('\r\n') - full_html = f'
\n
\n{html}\n
\n
' - path = TMP_EXAMPLES_DIR / f'{input_file.stem}.html' - path.write_text(full_html) - print(f'generated ansi output to {path}') - - -dont_execute_re = re.compile(r'^# dont-execute\n', flags=re.M | re.I) -dont_upgrade_re = re.compile(r'^# dont-upgrade\n', flags=re.M | re.I) -requires_re = re.compile(r'^# requires: *(.+)\n', flags=re.M | re.I) -required_py_re = re.compile(r'^# *requires *python *(\d+).(\d+)', flags=re.M) - - -def should_execute(file_name: str, file_text: str) -> tuple[str, bool, Version]: - m = required_py_re.search(file_text) - if m: - lowest_version = (int(m.groups()[0]), int(m.groups()[1])) - if sys.version_info >= lowest_version: - return required_py_re.sub('', file_text), True, lowest_version - else: - v = '.'.join(m.groups()) - print(f'WARNING: {file_name} requires python {v}, not running') - return ( - required_py_re.sub(f'# requires python {v}, NOT EXECUTED!', file_text), - False, - lowest_version, - ) - elif dont_execute_re.search(file_text): - return dont_execute_re.sub('', file_text), False, LOWEST_VERSION - return file_text, True, LOWEST_VERSION - - -def should_upgrade(file_text: str) -> tuple[str, bool]: - if dont_upgrade_re.search(file_text): - return dont_upgrade_re.sub('', file_text), False - return file_text, True - - -def get_requirements(file_text: str) -> tuple[str, str | None]: - m = requires_re.search(file_text) - if m: - return requires_re.sub('', file_text), m.groups()[0] - return file_text, None - - -def exec_file(file: Path, file_text: str, error: Error) -> tuple[list[str], str | None]: - no_print_intercept_re = re.compile(r'^# no-print-intercept\n', flags=re.M) - print_intercept = not bool(no_print_intercept_re.search(file_text)) - if not print_intercept: - file_text = no_print_intercept_re.sub('', file_text) - - if file.stem in sys.modules: - del sys.modules[file.stem] - mp = MockPrint(file) - mod = None - - with patch.object(Path, 'read_text', MockPath.read_text), patch('builtins.print') as patch_print: - if print_intercept: - patch_print.side_effect = mp - try: - mod = importlib.import_module(file.stem) - except Exception: - tb = traceback.format_exception(*sys.exc_info()) - error(''.join(e for e in tb if '/pydantic/docs/examples/' in e or not e.startswith(' File '))) - - if mod and mod.__file__ != str(file): - error(f'module path "{mod.__file__}" is not same as "{file}", name may shadow another module?') - - lines = file_text.split('\n') - - to_json_line = '# output-json' - if to_json_line in lines: - lines = [line for line in lines if line != to_json_line] - if len(mp.statements) != 1: - error('should have exactly one print statement') - print_lines = build_print_lines(mp.statements[0][1]) - return lines, '\n'.join(print_lines) + '\n' - else: - for line_no, print_string in reversed(mp.statements): - build_print_statement(line_no, print_string, lines) - return lines, None - - -def filter_lines(lines: list[str], error: Any) -> tuple[list[str], bool]: - ignored_above = False - try: - ignore_above = lines.index('# ignore-above') - except ValueError: - pass - else: - ignored_above = True - lines = lines[ignore_above + 1 :] - - try: - ignore_below = lines.index('# ignore-below') - except ValueError: - pass - else: - lines = lines[:ignore_below] - - lines = '\n'.join(lines).split('\n') - if any(len(line) > MAX_LINE_LENGTH for line in lines): - error(f'lines longer than {MAX_LINE_LENGTH} characters') - return lines, ignored_above - - -def upgrade_code(content: str, min_version: Version = HIGHEST_VERSION) -> str: - import autoflake # type: ignore - import pyupgrade._main # type: ignore - - upgraded = pyupgrade._main._fix_plugins( - content, - settings=pyupgrade._main.Settings( - min_version=min_version, - keep_percent_format=True, - keep_mock=False, - keep_runtime_typing=True, - ), - ) - upgraded = autoflake.fix_code(upgraded, remove_all_unused_imports=True) - return upgraded - - -def ensure_used(file: Path, all_md: str, error: Error) -> None: - """Ensures that example is used appropriately""" - file_tmpl = '{{!.tmp_examples/{}!}}' - md_name = file.stem + '.md' - if file_tmpl.format(md_name) not in all_md: - if file_tmpl.format(file.name) in all_md: - error( - f'incorrect usage, change filename to {md_name!r} in docs.' - "make sure you don't specify ```py code blocks around examples," - 'they are automatically generated now.' - ) - else: - error( - 'file not used anywhere. correct usage:', - file_tmpl.format(md_name), - ) - - -def check_style(file_text: str, error: Error) -> None: - if '\n\n\n\n' in file_text: - error('too many new lines') - if not file_text.endswith('\n'): - error('no trailing new line') - if re.search('^ *# *>', file_text, flags=re.M): - error('contains comments with print output, please remove') - - -def populate_upgraded_versions(file: Path, file_text: str, lowest_version: Version) -> list[tuple[Path, str, Version]]: - versions = [] - major, minor = lowest_version - assert major == HIGHEST_VERSION[0], 'Wow, Python 4 is out? Congrats!' - upgraded_file_text = file_text - while minor < HIGHEST_VERSION[1]: - minor += 1 - new_file_text = upgrade_code(file_text, min_version=(major, minor)) - if upgraded_file_text != new_file_text: - upgraded_file_text = new_file_text - new_file = UPGRADED_TMP_EXAMPLES_DIR / (file.stem + f'_{major}_{minor}' + file.suffix) - new_file.write_text(upgraded_file_text) - versions.append((new_file, upgraded_file_text, (major, minor))) - return versions - - -def v2_hack() -> bool: - """ - Temporary hack while working on V2. - - TODO remove once tests are working again - (although test generation needs to be completely rewritten to match watchfiles et. al.) - """ - return True - - -def exec_examples() -> int: # noqa: C901 (I really don't want to decompose it any further) - errors = [] - all_md = all_md_contents() - new_files = {} - os.environ.update( - { - 'my_auth_key': 'xxx', - 'my_api_key': 'xxx', - 'database_dsn': 'postgres://postgres@localhost:5432/env_db', - 'v0': '0', - 'sub_model': '{"v1": "json-1", "v2": "json-2"}', - 'sub_model__v2': 'nested-2', - 'sub_model__v3': '3', - 'sub_model__deep__v4': 'v4', - } - ) - sys.path.append(str(EXAMPLES_DIR)) - if sys.version_info < HIGHEST_VERSION: - print("WARNING: examples for 3.10+ requires python 3.10. They won't be executed") - else: - UPGRADED_TMP_EXAMPLES_DIR.mkdir(parents=True, exist_ok=True) - sys.path.append(str(UPGRADED_TMP_EXAMPLES_DIR)) - - for file in sorted(EXAMPLES_DIR.iterdir()): - markdown_name = file.stem + '.md' - - def error(*desc: str) -> None: - errors.append((file, desc)) - previous_frame = sys._getframe(1) - filename = Path(previous_frame.f_globals['__file__']).relative_to(Path.cwd()) - location = f'{filename}:{previous_frame.f_lineno}' - sys.stderr.write(f'{location}: error in {file.relative_to(Path.cwd())}:\n{" ".join(desc)}\n') - - if not file.is_file(): - # __pycache__, maybe others - continue - - if file.suffix != '.py': - # just copy - new_files[file.name] = file.read_text() - continue - - file_text = file.read_text('utf-8') - ensure_used(file, all_md, error) - check_style(file_text, error) - - if v2_hack(): - new_files[markdown_name] = f'```py\n{file_text}\n```\n' - continue - - file_text, execute, lowest_version = should_execute(file.name, file_text) - file_text, upgrade = should_upgrade(file_text) - file_text, requirements = get_requirements(file_text) - - if upgrade and upgrade_code(file_text, min_version=lowest_version) != file_text: - error("pyupgrade would upgrade file. If it's not desired, add '# dont-upgrade' line at the top of the file") - - versions: list[tuple[Path, str, Version]] = [(file, file_text, lowest_version)] - if upgrade: - versions.extend(populate_upgraded_versions(file, file_text, lowest_version)) - - json_outputs: set[str | None] = set() - should_run_as_is = not requirements - final_content: list[str] = [] - for file, file_text, lowest_version in versions: - if execute and sys.version_info >= lowest_version: - lines, json_output = exec_file(file, file_text, error) - json_outputs.add(json_output) - else: - lines = file_text.split('\n') - - lines, ignored_lines_before_script = filter_lines(lines, error) - should_run_as_is = should_run_as_is and not ignored_lines_before_script - - final_content.append( - PYTHON_CODE_MD_TMPL.format( - version='.'.join(map(str, lowest_version)), - code=textwrap.indent('\n'.join(lines), ' '), - ) - ) - - if should_run_as_is: - final_content.append('_(This script is complete, it should run "as is")_') - elif requirements: - final_content.append(f'_(This script requires {requirements})_') - else: - error( - 'script may not run as is, but requirements were not specified.', - 'specify `# requires: ` in the end of the script', - ) - - if len(json_outputs) > 1: - error('json output should not differ between versions') - - if json_outputs: - json_output, *_ = json_outputs - if json_output: - final_content.append(JSON_OUTPUT_MD_TMPL.format(output=json_output)) - - new_files[markdown_name] = '\n'.join(final_content) - - if errors: - print(f'\n{len(errors)} errors, not writing files\n') - return 1 - - if TMP_EXAMPLES_DIR.exists(): - shutil.rmtree(TMP_EXAMPLES_DIR) - - print(f'writing {len(new_files)} example files to {TMP_EXAMPLES_DIR}') - TMP_EXAMPLES_DIR.mkdir() - for file_name, content in new_files.items(): - (TMP_EXAMPLES_DIR / file_name).write_text(content, 'utf-8') - gen_ansi_output() - - return 0 - - -if __name__ == '__main__': - sys.exit(exec_examples()) diff --git a/docs/build/main.py b/docs/build/main.py deleted file mode 100755 index 2e21507308..0000000000 --- a/docs/build/main.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python3 -import re -import sys -from importlib.machinery import SourceFileLoader -from pathlib import Path - -THIS_DIR = Path(__file__).parent -PROJECT_ROOT = THIS_DIR / '..' / '..' - - -def main() -> int: - history = (PROJECT_ROOT / 'HISTORY.md').read_text() - history = re.sub(r'#(\d+)', r'[#\1](https://github.com/pydantic/pydantic/issues/\1)', history) - history = re.sub(r'(\s)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) - history = re.sub('@@', '@', history) - - (PROJECT_ROOT / 'docs/.changelog.md').write_text(history) - - version = SourceFileLoader('version', str(PROJECT_ROOT / 'pydantic/version.py')).load_module() - (PROJECT_ROOT / 'docs/.version.md').write_text(f'Documentation for version: **v{version.VERSION}**\n') - - sys.path.append(str(THIS_DIR.resolve())) - from exec_examples import exec_examples - from schema_mapping import build_schema_mappings - - build_schema_mappings() - return exec_examples() - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/docs/build/schema_mapping.py b/docs/build/schema_mapping.py deleted file mode 100755 index e9ef28d560..0000000000 --- a/docs/build/schema_mapping.py +++ /dev/null @@ -1,565 +0,0 @@ -#!/usr/bin/env python3 -""" -Build a table of Python / Pydantic to JSON Schema mappings. - -Done like this rather than as a raw rst table to make future edits easier. - -Please edit this file directly not .tmp_schema_mappings.html -""" -from __future__ import annotations - -import json -import re -from pathlib import Path -from typing import Any - -table: list[tuple[str, str, str | dict[str, Any], str, str]] = [ - ( - 'None', - 'null', - '', - 'JSON Schema Core', - 'Same for `type(None)` or `Literal[None]`', - ), - ( - 'bool', - 'boolean', - '', - 'JSON Schema Core', - '', - ), - ( - 'str', - 'string', - '', - 'JSON Schema Core', - '', - ), - ( - 'float', - 'number', - '', - 'JSON Schema Core', - '', - ), - ( - 'int', - 'integer', - '', - 'JSON Schema Validation', - '', - ), - ( - 'dict', - 'object', - '', - 'JSON Schema Core', - '', - ), - ( - 'list', - 'array', - {'items': {}}, - 'JSON Schema Core', - '', - ), - ( - 'tuple-positional', - 'array', - {'items': {}}, - 'JSON Schema Core', - '', - ), - ( - 'tuple-variable', - 'array', - {'items': {}}, - 'JSON Schema Core', - '', - ), - ( - 'set', - 'array', - {'items': {}, 'uniqueItems': True}, - 'JSON Schema Validation', - '', - ), - ( - 'frozenset', - 'array', - {'items': {}, 'uniqueItems': True}, - 'JSON Schema Validation', - '', - ), - ( - 'List[str]', - 'array', - {'items': {'type': 'string'}}, - 'JSON Schema Validation', - 'And equivalently for any other sub type, e.g. `List[int]`.', - ), - ( - 'Tuple[str, ...]', - 'array', - {'items': {'type': 'string'}}, - 'JSON Schema Validation', - 'And equivalently for any other sub type, e.g. `Tuple[int, ...]`.', - ), - ( - 'Tuple[str, int]', - 'array', - {'items': [{'type': 'string'}, {'type': 'integer'}], 'minItems': 2, 'maxItems': 2}, - 'JSON Schema Validation', - ( - 'And equivalently for any other set of subtypes. Note: If using schemas for OpenAPI, ' - "you shouldn't use this declaration, as it would not be valid in OpenAPI (although it is " - 'valid in JSON Schema).' - ), - ), - ( - 'Dict[str, int]', - 'object', - {'additionalProperties': {'type': 'integer'}}, - 'JSON Schema Validation', - ( - 'And equivalently for any other subfields for dicts. Have in mind that although you can use other types as ' - 'keys for dicts with Pydantic, only strings are valid keys for JSON, and so, only str is valid as ' - 'JSON Schema key types.' - ), - ), - ( - 'Union[str, int]', - 'anyOf', - {'anyOf': [{'type': 'string'}, {'type': 'integer'}]}, - 'JSON Schema Validation', - 'And equivalently for any other subfields for unions.', - ), - ( - 'Enum', - 'enum', - '{"enum": [...]}', - 'JSON Schema Validation', - 'All the literal values in the enum are included in the definition.', - ), - ( - 'SecretStr', - 'string', - {'writeOnly': True}, - 'JSON Schema Validation', - '', - ), - ( - 'SecretBytes', - 'string', - {'writeOnly': True}, - 'JSON Schema Validation', - '', - ), - ( - 'EmailStr', - 'string', - {'format': 'email'}, - 'JSON Schema Validation', - '', - ), - ( - 'NameEmail', - 'string', - {'format': 'name-email'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'AnyUrl', - 'string', - {'format': 'uri'}, - 'JSON Schema Validation', - '', - ), - ( - 'Pattern', - 'string', - {'format': 'regex'}, - 'JSON Schema Validation', - '', - ), - ( - 'bytes', - 'string', - {'format': 'binary'}, - 'OpenAPI', - '', - ), - ( - 'Decimal', - 'number', - '', - 'JSON Schema Core', - '', - ), - ( - 'UUID1', - 'string', - {'format': 'uuid1'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'UUID3', - 'string', - {'format': 'uuid3'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'UUID4', - 'string', - {'format': 'uuid4'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'UUID5', - 'string', - {'format': 'uuid5'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'UUID', - 'string', - {'format': 'uuid'}, - 'Pydantic standard "format" extension', - 'Suggested in OpenAPI.', - ), - ( - 'FilePath', - 'string', - {'format': 'file-path'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'DirectoryPath', - 'string', - {'format': 'directory-path'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'Path', - 'string', - {'format': 'path'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'datetime', - 'string', - {'format': 'date-time'}, - 'JSON Schema Validation', - '', - ), - ( - 'date', - 'string', - {'format': 'date'}, - 'JSON Schema Validation', - '', - ), - ( - 'time', - 'string', - {'format': 'time'}, - 'JSON Schema Validation', - '', - ), - ( - 'timedelta', - 'number', - {'format': 'time-delta'}, - 'Difference in seconds (a `float`), with Pydantic standard "format" extension', - "Suggested in JSON Schema repository's issues by maintainer.", - ), - ( - 'Json', - 'string', - {'format': 'json-string'}, - 'Pydantic standard "format" extension', - '', - ), - ( - 'IPv4Address', - 'string', - {'format': 'ipv4'}, - 'JSON Schema Validation', - '', - ), - ( - 'IPv6Address', - 'string', - {'format': 'ipv6'}, - 'JSON Schema Validation', - '', - ), - ( - 'IPvAnyAddress', - 'string', - {'format': 'ipvanyaddress'}, - 'Pydantic standard "format" extension', - 'IPv4 or IPv6 address as used in `ipaddress` module', - ), - ( - 'IPv4Interface', - 'string', - {'format': 'ipv4interface'}, - 'Pydantic standard "format" extension', - 'IPv4 interface as used in `ipaddress` module', - ), - ( - 'IPv6Interface', - 'string', - {'format': 'ipv6interface'}, - 'Pydantic standard "format" extension', - 'IPv6 interface as used in `ipaddress` module', - ), - ( - 'IPvAnyInterface', - 'string', - {'format': 'ipvanyinterface'}, - 'Pydantic standard "format" extension', - 'IPv4 or IPv6 interface as used in `ipaddress` module', - ), - ( - 'IPv4Network', - 'string', - {'format': 'ipv4network'}, - 'Pydantic standard "format" extension', - 'IPv4 network as used in `ipaddress` module', - ), - ( - 'IPv6Network', - 'string', - {'format': 'ipv6network'}, - 'Pydantic standard "format" extension', - 'IPv6 network as used in `ipaddress` module', - ), - ( - 'IPvAnyNetwork', - 'string', - {'format': 'ipvanynetwork'}, - 'Pydantic standard "format" extension', - 'IPv4 or IPv6 network as used in `ipaddress` module', - ), - ( - 'StrictBool', - 'boolean', - '', - 'JSON Schema Core', - '', - ), - ( - 'StrictStr', - 'string', - '', - 'JSON Schema Core', - '', - ), - ( - 'ConstrainedStr', - 'string', - '', - 'JSON Schema Core', - ( - 'If the type has values declared for the constraints, they are included as validations. ' - 'See the mapping for `constr` below.' - ), - ), - ( - "constr(regex='^text$', min_length=2, max_length=10)", - 'string', - {'pattern': '^text$', 'minLength': 2, 'maxLength': 10}, - 'JSON Schema Validation', - 'Any argument not passed to the function (not defined) will not be included in the schema.', - ), - ( - 'ConstrainedInt', - 'integer', - '', - 'JSON Schema Core', - ( - 'If the type has values declared for the constraints, they are included as validations. ' - 'See the mapping for `conint` below.' - ), - ), - ( - 'conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)', - 'integer', - {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, - '', - 'Any argument not passed to the function (not defined) will not be included in the schema.', - ), - ( - 'PositiveInt', - 'integer', - {'exclusiveMinimum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NegativeInt', - 'integer', - {'exclusiveMaximum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NonNegativeInt', - 'integer', - {'minimum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NonPositiveInt', - 'integer', - {'maximum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'ConstrainedFloat', - 'number', - '', - 'JSON Schema Core', - ( - 'If the type has values declared for the constraints, they are included as validations. ' - 'See the mapping for `confloat` below.' - ), - ), - ( - 'confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)', - 'number', - {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, - 'JSON Schema Validation', - 'Any argument not passed to the function (not defined) will not be included in the schema.', - ), - ( - 'PositiveFloat', - 'number', - {'exclusiveMinimum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NegativeFloat', - 'number', - {'exclusiveMaximum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NonNegativeFloat', - 'number', - {'minimum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'NonPositiveFloat', - 'number', - {'maximum': 0}, - 'JSON Schema Validation', - '', - ), - ( - 'ConstrainedDecimal', - 'number', - '', - 'JSON Schema Core', - ( - 'If the type has values declared for the constraints, they are included as validations. ' - 'See the mapping for `condecimal` below.' - ), - ), - ( - 'condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)', - 'number', - {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, - 'JSON Schema Validation', - 'Any argument not passed to the function (not defined) will not be included in the schema.', - ), - ( - 'BaseModel', - 'object', - '', - 'JSON Schema Core', - 'All the properties defined will be defined with standard JSON Schema, including submodels.', - ), - ( - 'Color', - 'string', - {'format': 'color'}, - 'Pydantic standard "format" extension', - '', - ), -] - -headings = [ - 'Python type', - 'JSON Schema Type', - 'Additional JSON Schema', - 'Defined in', -] - - -def md2html(s: str) -> str: - return re.sub(r'`(.+?)`', r'\1', s) - - -def build_schema_mappings() -> None: - rows = [] - - for py_type, json_type, additional, defined_in, notes in table: - if additional and not isinstance(additional, str): - additional = json.dumps(additional) - cols = [ - f'{py_type}', - f'{json_type}', - f'{additional}' if additional else '', - md2html(defined_in) - ] - rows.append('\n'.join(f' \n {c}\n ' for c in cols)) - if notes: - rows.append( - f' \n' - f' {md2html(notes)}\n' - f' ' - ) - - heading = '\n'.join(f' {h}' for h in headings) - body = '\n\n\n'.join(rows) - text = f"""\ - - - - - -{heading} - - - - -{body} - - -
-""" - (Path(__file__).parent / '..' / '.tmp_schema_mappings.html').write_text(text) - - -if __name__ == '__main__': - build_schema_mappings() diff --git a/docs/changelog.md b/docs/changelog.md deleted file mode 100644 index 5df42d61cd..0000000000 --- a/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -{!.changelog.md!} diff --git a/docs/datamodel_code_generator.md b/docs/datamodel_code_generator.md index 90daa8c3c9..410f7fa262 100644 --- a/docs/datamodel_code_generator.md +++ b/docs/datamodel_code_generator.md @@ -70,7 +70,29 @@ person.json: ``` model.py: -{!.tmp_examples/generate_models_person_model.md!} +```py upgrade="skip" requires="3.10" +# generated by datamodel-codegen: +# filename: person.json +# timestamp: 2020-05-19T15:07:31+00:00 +from __future__ import annotations + +from typing import Any + +from pydantic import BaseModel, Field, conint + + +class Pet(BaseModel): + name: str | None = None + age: int | None = None + + +class Person(BaseModel): + first_name: str = Field(..., description="The person's first name.") + last_name: str = Field(..., description="The person's last name.") + age: conint(ge=0) | None = Field(None, description='Age in years.') + pets: list[Pet] | None = None + comment: Any | None = None +``` More information can be found on the [official documentation](https://koxudaxi.github.io/datamodel-code-generator/) diff --git a/docs/examples/.editorconfig b/docs/examples/.editorconfig deleted file mode 100644 index e65b1275e2..0000000000 --- a/docs/examples/.editorconfig +++ /dev/null @@ -1,6 +0,0 @@ -root = true - -[*.py] -indent_style = space -indent_size = 4 -max_line_length = 80 diff --git a/docs/examples/annotated_types_named_tuple.py b/docs/examples/annotated_types_named_tuple.py deleted file mode 100644 index 6fd36cdaaf..0000000000 --- a/docs/examples/annotated_types_named_tuple.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import NamedTuple - -from pydantic import BaseModel, ValidationError - - -class Point(NamedTuple): - x: int - y: int - - -class Model(BaseModel): - p: Point - - -print(Model(p=('1', '2'))) - -try: - Model(p=('1.3', '2')) -except ValidationError as e: - print(e) diff --git a/docs/examples/annotated_types_typed_dict.py b/docs/examples/annotated_types_typed_dict.py deleted file mode 100644 index bbfbc877b2..0000000000 --- a/docs/examples/annotated_types_typed_dict.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing_extensions import TypedDict - -from pydantic import BaseModel, Extra, ValidationError - - -# `total=False` means keys are non-required -class UserIdentity(TypedDict, total=False): - name: str - surname: str - - -class User(TypedDict): - identity: UserIdentity - age: int - - -class Model(BaseModel): - u: User - - class Config: - extra = Extra.forbid - - -print(Model(u={'identity': {'name': 'Smith', 'surname': 'John'}, 'age': '37'})) - -print(Model(u={'identity': {'name': None, 'surname': 'John'}, 'age': '37'})) - -print(Model(u={'identity': {}, 'age': '37'})) - - -try: - Model(u={'identity': {'name': ['Smith'], 'surname': 'John'}, 'age': '24'}) -except ValidationError as e: - print(e) - -try: - Model( - u={ - 'identity': {'name': 'Smith', 'surname': 'John'}, - 'age': '37', - 'email': 'john.smith@me.com', - } - ) -except ValidationError as e: - print(e) diff --git a/docs/examples/dataclasses_arbitrary_types_allowed.py b/docs/examples/dataclasses_arbitrary_types_allowed.py deleted file mode 100644 index 8a4f5597ab..0000000000 --- a/docs/examples/dataclasses_arbitrary_types_allowed.py +++ /dev/null @@ -1,42 +0,0 @@ -import dataclasses - -import pydantic - - -class ArbitraryType: - def __init__(self, value): - self.value = value - - def __repr__(self): - return f'ArbitraryType(value={self.value!r})' - - -@dataclasses.dataclass -class DC: - a: ArbitraryType - b: str - - -# valid as it is a builtin dataclass without validation -my_dc = DC(a=ArbitraryType(value=3), b='qwe') - -try: - class Model(pydantic.BaseModel): - dc: DC - other: str - - Model(dc=my_dc, other='other') -except RuntimeError as e: # invalid as it is now a pydantic dataclass - print(e) - - -class Model(pydantic.BaseModel): - dc: DC - other: str - - class Config: - arbitrary_types_allowed = True - - -m = Model(dc=my_dc, other='other') -print(repr(m)) diff --git a/docs/examples/dataclasses_config.py b/docs/examples/dataclasses_config.py deleted file mode 100644 index d7c4de52de..0000000000 --- a/docs/examples/dataclasses_config.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import ConfigDict -from pydantic.dataclasses import dataclass - - -# Option 1 - use directly a dict -# Note: `mypy` will still raise typo error -@dataclass(config=dict(validate_assignment=True)) -class MyDataclass1: - a: int - - -# Option 2 - use `ConfigDict` -# (same as before at runtime since it's a `TypedDict` but with intellisense) -@dataclass(config=ConfigDict(validate_assignment=True)) -class MyDataclass2: - a: int - - -# Option 3 - use a `Config` class like for a `BaseModel` -class Config: - validate_assignment = True - - -@dataclass(config=Config) -class MyDataclass3: - a: int diff --git a/docs/examples/dataclasses_default_schema.py b/docs/examples/dataclasses_default_schema.py deleted file mode 100644 index 5643d9ecc2..0000000000 --- a/docs/examples/dataclasses_default_schema.py +++ /dev/null @@ -1,21 +0,0 @@ -import dataclasses -from typing import List, Optional - -from pydantic import Field -from pydantic.dataclasses import dataclass - - -@dataclass -class User: - id: int - name: str = 'John Doe' - friends: List[int] = dataclasses.field(default_factory=lambda: [0]) - age: Optional[int] = dataclasses.field( - default=None, - metadata=dict(title='The age of the user', description='do not lie!') - ) - height: Optional[int] = Field(None, title='The height in cm', ge=50, le=300) - - -user = User(id='42') -print(user.__pydantic_model__.model_json_schema()) diff --git a/docs/examples/dataclasses_initvars.py b/docs/examples/dataclasses_initvars.py deleted file mode 100644 index ead7149933..0000000000 --- a/docs/examples/dataclasses_initvars.py +++ /dev/null @@ -1,23 +0,0 @@ -from dataclasses import InitVar -from pathlib import Path -from typing import Optional - -from pydantic.dataclasses import dataclass - - -@dataclass -class PathData: - path: Path - base_path: InitVar[Optional[Path]] - - def __post_init__(self, base_path): - print(f'Received path={self.path!r}, base_path={base_path!r}') - - def __post_init_post_parse__(self, base_path): - if base_path is not None: - self.path = base_path / self.path - - -path_data = PathData('world', base_path='/hello') -# Received path='world', base_path='/hello' -assert path_data.path == Path('/hello/world') diff --git a/docs/examples/dataclasses_json_dumps.py b/docs/examples/dataclasses_json_dumps.py deleted file mode 100644 index 80266b9268..0000000000 --- a/docs/examples/dataclasses_json_dumps.py +++ /dev/null @@ -1,17 +0,0 @@ -import dataclasses -import json -from typing import List - -from pydantic.dataclasses import dataclass -from pydantic.json import pydantic_encoder - - -@dataclass -class User: - id: int - name: str = 'John Doe' - friends: List[int] = dataclasses.field(default_factory=lambda: [0]) - - -user = User(id='42') -print(json.dumps(user, indent=4, default=pydantic_encoder)) diff --git a/docs/examples/dataclasses_main.py b/docs/examples/dataclasses_main.py deleted file mode 100644 index 415bb4ff41..0000000000 --- a/docs/examples/dataclasses_main.py +++ /dev/null @@ -1,13 +0,0 @@ -from datetime import datetime -from pydantic.dataclasses import dataclass - - -@dataclass -class User: - id: int - name: str = 'John Doe' - signup_ts: datetime = None - - -user = User(id='42', signup_ts='2032-06-21T12:00') -print(user) diff --git a/docs/examples/dataclasses_nested.py b/docs/examples/dataclasses_nested.py deleted file mode 100644 index f3e73c3c10..0000000000 --- a/docs/examples/dataclasses_nested.py +++ /dev/null @@ -1,16 +0,0 @@ -from pydantic import AnyUrl -from pydantic.dataclasses import dataclass - - -@dataclass -class NavbarButton: - href: AnyUrl - - -@dataclass -class Navbar: - button: NavbarButton - - -navbar = Navbar(button=('https://example.com',)) -print(navbar) diff --git a/docs/examples/dataclasses_post_init_post_parse.py b/docs/examples/dataclasses_post_init_post_parse.py deleted file mode 100644 index b5ae4fd154..0000000000 --- a/docs/examples/dataclasses_post_init_post_parse.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic.dataclasses import dataclass - - -@dataclass -class Birth: - year: int - month: int - day: int - - -@dataclass -class User: - birth: Birth - - def __post_init__(self): - print(self.birth) - - def __post_init_post_parse__(self): - print(self.birth) - - -user = User(**{'birth': {'year': 1995, 'month': 3, 'day': 2}}) diff --git a/docs/examples/dataclasses_stdlib_inheritance.py b/docs/examples/dataclasses_stdlib_inheritance.py deleted file mode 100644 index 0409b46963..0000000000 --- a/docs/examples/dataclasses_stdlib_inheritance.py +++ /dev/null @@ -1,27 +0,0 @@ -import dataclasses - -import pydantic - - -@dataclasses.dataclass -class Z: - z: int - - -@dataclasses.dataclass -class Y(Z): - y: int = 0 - - -@pydantic.dataclasses.dataclass -class X(Y): - x: int = 0 - - -foo = X(x=b'1', y='2', z='3') -print(foo) - -try: - X(z='pika') -except pydantic.ValidationError as e: - print(e) diff --git a/docs/examples/dataclasses_stdlib_run_validation.py b/docs/examples/dataclasses_stdlib_run_validation.py deleted file mode 100644 index 6d13c0af32..0000000000 --- a/docs/examples/dataclasses_stdlib_run_validation.py +++ /dev/null @@ -1,30 +0,0 @@ -import dataclasses - -from pydantic import ValidationError -from pydantic.dataclasses import dataclass as pydantic_dataclass, set_validation - - -@dataclasses.dataclass -class User: - id: int - name: str - - -# Enhance stdlib dataclass -pydantic_dataclass(User) - - -user1 = User(id='whatever', name='I want') - -# validate data of `user1` -try: - user1.__pydantic_validate_values__() -except ValidationError as e: - print(e) - -# Enforce validation -try: - with set_validation(User, True): - User(id='whatever', name='I want') -except ValidationError as e: - print(e) diff --git a/docs/examples/dataclasses_stdlib_to_pydantic.py b/docs/examples/dataclasses_stdlib_to_pydantic.py deleted file mode 100644 index a3a04f62ce..0000000000 --- a/docs/examples/dataclasses_stdlib_to_pydantic.py +++ /dev/null @@ -1,47 +0,0 @@ -import dataclasses -from datetime import datetime -from typing import Optional - -import pydantic - - -@dataclasses.dataclass -class Meta: - modified_date: Optional[datetime] - seen_count: int - - -@dataclasses.dataclass -class File(Meta): - filename: str - - -# `ValidatedFile` will be a proxy around `File` -ValidatedFile = pydantic.dataclasses.dataclass(File) - -# the original dataclass is the `__dataclass__` attribute -assert ValidatedFile.__dataclass__ is File - - -validated_file = ValidatedFile( - filename=b'thefilename', - modified_date='2020-01-01T00:00', - seen_count='7', -) -print(validated_file) - -try: - ValidatedFile( - filename=['not', 'a', 'string'], - modified_date=None, - seen_count=3, - ) -except pydantic.ValidationError as e: - print(e) - -# `File` is not altered and still does no validation by default -print(File( - filename=['not', 'a', 'string'], - modified_date=None, - seen_count=3, -)) diff --git a/docs/examples/dataclasses_stdlib_with_basemodel.py b/docs/examples/dataclasses_stdlib_with_basemodel.py deleted file mode 100644 index 92b494891a..0000000000 --- a/docs/examples/dataclasses_stdlib_with_basemodel.py +++ /dev/null @@ -1,39 +0,0 @@ -import dataclasses -from datetime import datetime -from typing import Optional - -from pydantic import BaseModel, ValidationError - - -@dataclasses.dataclass(frozen=True) -class User: - name: str - - -@dataclasses.dataclass -class File: - filename: str - last_modification_time: Optional[datetime] = None - - -class Foo(BaseModel): - file: File - user: Optional[User] = None - - -file = File( - filename=['not', 'a', 'string'], - last_modification_time='2020-01-01T00:00', -) # nothing is validated as expected -print(file) - -try: - Foo(file=file) -except ValidationError as e: - print(e) - -foo = Foo(file=File(filename='myfile'), user=User(name='pika')) -try: - foo.user.name = 'bulbi' -except dataclasses.FrozenInstanceError as e: - print(e) diff --git a/docs/examples/devtools_main.py b/docs/examples/devtools_main.py deleted file mode 100644 index f0d7e6bc38..0000000000 --- a/docs/examples/devtools_main.py +++ /dev/null @@ -1,33 +0,0 @@ -# no-print-intercept -from datetime import datetime -from typing import List -from pydantic import BaseModel - -from devtools import debug - - -class Address(BaseModel): - street: str - country: str - lat: float - lng: float - - -class User(BaseModel): - id: int - name: str - signup_ts: datetime - friends: List[int] - address: Address - - -user = User( - id='123', - name='John Doe', - signup_ts='2019-06-01 12:22', - friends=[1234, 4567, 7890], - address=dict(street='Testing', country='uk', lat=51.5, lng=0), -) -debug(user) -print('\nshould be much easier read than:\n') -print('user:', user) diff --git a/docs/examples/exporting_models_copy.py b/docs/examples/exporting_models_copy.py deleted file mode 100644 index d6d681409a..0000000000 --- a/docs/examples/exporting_models_copy.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic import BaseModel - - -class BarModel(BaseModel): - whatever: int - - -class FooBarModel(BaseModel): - banana: float - foo: str - bar: BarModel - - -m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) - -print(m.copy(include={'foo', 'bar'})) -print(m.copy(exclude={'foo', 'bar'})) -print(m.copy(update={'banana': 0})) -print(id(m.bar), id(m.copy().bar)) -# normal copy gives the same object reference for `bar` -print(id(m.bar), id(m.copy(deep=True).bar)) -# deep copy gives a new object reference for `bar` diff --git a/docs/examples/exporting_models_dict.py b/docs/examples/exporting_models_dict.py deleted file mode 100644 index babfb16664..0000000000 --- a/docs/examples/exporting_models_dict.py +++ /dev/null @@ -1,19 +0,0 @@ -from pydantic import BaseModel - - -class BarModel(BaseModel): - whatever: int - - -class FooBarModel(BaseModel): - banana: float - foo: str - bar: BarModel - - -m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) - -# returns a dictionary: -print(m.model_dump()) -print(m.model_dump(include={'foo', 'bar'})) -print(m.model_dump(exclude={'foo', 'bar'})) diff --git a/docs/examples/exporting_models_exclude1.py b/docs/examples/exporting_models_exclude1.py deleted file mode 100644 index 2490a620d1..0000000000 --- a/docs/examples/exporting_models_exclude1.py +++ /dev/null @@ -1,32 +0,0 @@ -from pydantic import BaseModel, SecretStr - - -class User(BaseModel): - id: int - username: str - password: SecretStr - - -class Transaction(BaseModel): - id: str - user: User - value: int - - -t = Transaction( - id='1234567890', - user=User( - id=42, - username='JohnDoe', - password='hashedpassword' - ), - value=9876543210, -) - -# using a set: -print(t.model_dump(exclude={'user', 'value'})) - -# using a dict: -print(t.model_dump(exclude={'user': {'username', 'password'}, 'value': True})) - -print(t.model_dump(include={'id': True, 'user': {'id'}})) diff --git a/docs/examples/exporting_models_exclude2.py b/docs/examples/exporting_models_exclude2.py deleted file mode 100644 index 30c3fd0bb7..0000000000 --- a/docs/examples/exporting_models_exclude2.py +++ /dev/null @@ -1,73 +0,0 @@ -import datetime -from typing import List - -from pydantic import BaseModel, SecretStr - - -class Country(BaseModel): - name: str - phone_code: int - - -class Address(BaseModel): - post_code: int - country: Country - - -class CardDetails(BaseModel): - number: SecretStr - expires: datetime.date - - -class Hobby(BaseModel): - name: str - info: str - - -class User(BaseModel): - first_name: str - second_name: str - address: Address - card_details: CardDetails - hobbies: List[Hobby] - - -user = User( - first_name='John', - second_name='Doe', - address=Address( - post_code=123456, - country=Country( - name='USA', - phone_code=1 - ) - ), - card_details=CardDetails( - number=4212934504460000, - expires=datetime.date(2020, 5, 1) - ), - hobbies=[ - Hobby(name='Programming', info='Writing code and stuff'), - Hobby(name='Gaming', info='Hell Yeah!!!'), - ], -) - -exclude_keys = { - 'second_name': True, - 'address': {'post_code': True, 'country': {'phone_code'}}, - 'card_details': True, - # You can exclude fields from specific members of a tuple/list by index: - 'hobbies': {-1: {'info'}}, -} - -include_keys = { - 'first_name': True, - 'address': {'country': {'name'}}, - 'hobbies': {0: True, -1: {'name'}}, -} - -# would be the same as user.model_dump(exclude=exclude_keys) in this case: -print(user.model_dump(include=include_keys)) - -# To exclude a field from all members of a nested list or tuple, use "__all__": -print(user.model_dump(exclude={'hobbies': {'__all__': {'info'}}})) diff --git a/docs/examples/exporting_models_exclude3.py b/docs/examples/exporting_models_exclude3.py deleted file mode 100644 index 414ee6960a..0000000000 --- a/docs/examples/exporting_models_exclude3.py +++ /dev/null @@ -1,29 +0,0 @@ -from pydantic import BaseModel, Field, SecretStr - - -class User(BaseModel): - id: int - username: str - password: SecretStr = Field(..., exclude=True) - - -class Transaction(BaseModel): - id: str - user: User = Field(..., exclude={'username'}) - value: int - - class Config: - fields = {'value': {'exclude': True}} - - -t = Transaction( - id='1234567890', - user=User( - id=42, - username='JohnDoe', - password='hashedpassword' - ), - value=9876543210, -) - -print(t.model_dump()) diff --git a/docs/examples/exporting_models_exclude4.py b/docs/examples/exporting_models_exclude4.py deleted file mode 100644 index 84d6c61de1..0000000000 --- a/docs/examples/exporting_models_exclude4.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import BaseModel, Field, SecretStr - - -class User(BaseModel): - id: int - username: str # overridden by explicit exclude - password: SecretStr = Field(exclude=True) - - -class Transaction(BaseModel): - id: str - user: User - value: int - - -t = Transaction( - id='1234567890', - user=User( - id=42, - username='JohnDoe', - password='hashedpassword' - ), - value=9876543210, -) - -print(t.model_dump(exclude={'value': True, 'user': {'username'}})) diff --git a/docs/examples/exporting_models_exclude5.py b/docs/examples/exporting_models_exclude5.py deleted file mode 100644 index c7153ec064..0000000000 --- a/docs/examples/exporting_models_exclude5.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import BaseModel, Field, SecretStr - - -class User(BaseModel): - id: int = Field(..., include=True) - username: str = Field(..., include=True) # overridden by explicit include - password: SecretStr - - -class Transaction(BaseModel): - id: str - user: User - value: int - - -t = Transaction( - id='1234567890', - user=User( - id=42, - username='JohnDoe', - password='hashedpassword' - ), - value=9876543210, -) - -print(t.model_dump(include={'id': True, 'user': {'id'}})) diff --git a/docs/examples/exporting_models_iterate.py b/docs/examples/exporting_models_iterate.py deleted file mode 100644 index 6236a9bf70..0000000000 --- a/docs/examples/exporting_models_iterate.py +++ /dev/null @@ -1,18 +0,0 @@ -from pydantic import BaseModel - - -class BarModel(BaseModel): - whatever: int - - -class FooBarModel(BaseModel): - banana: float - foo: str - bar: BarModel - - -m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) - -print(dict(m)) -for name, value in m: - print(f'{name}: {value}') diff --git a/docs/examples/exporting_models_json.py b/docs/examples/exporting_models_json.py deleted file mode 100644 index 163256bb22..0000000000 --- a/docs/examples/exporting_models_json.py +++ /dev/null @@ -1,15 +0,0 @@ -from datetime import datetime -from pydantic import BaseModel - - -class BarModel(BaseModel): - whatever: int - - -class FooBarModel(BaseModel): - foo: datetime - bar: BarModel - - -m = FooBarModel(foo=datetime(2032, 6, 1, 12, 13, 14), bar={'whatever': 123}) -print(m.model_dump_json()) diff --git a/docs/examples/exporting_models_json_encoders.py b/docs/examples/exporting_models_json_encoders.py deleted file mode 100644 index b7eb50cf3b..0000000000 --- a/docs/examples/exporting_models_json_encoders.py +++ /dev/null @@ -1,18 +0,0 @@ -from datetime import datetime, timedelta -from pydantic import BaseModel -from pydantic.json import timedelta_isoformat - - -class WithCustomEncoders(BaseModel): - dt: datetime - diff: timedelta - - class Config: - json_encoders = { - datetime: lambda v: v.timestamp(), - timedelta: timedelta_isoformat, - } - - -m = WithCustomEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) -print(m.model_dump_json()) diff --git a/docs/examples/exporting_models_json_encoders_merge.py b/docs/examples/exporting_models_json_encoders_merge.py deleted file mode 100644 index 88852404f6..0000000000 --- a/docs/examples/exporting_models_json_encoders_merge.py +++ /dev/null @@ -1,24 +0,0 @@ -from datetime import datetime, timedelta -from pydantic import BaseModel -from pydantic.json import timedelta_isoformat - - -class BaseClassWithEncoders(BaseModel): - dt: datetime - diff: timedelta - - class Config: - json_encoders = { - datetime: lambda v: v.timestamp() - } - - -class ChildClassWithEncoders(BaseClassWithEncoders): - class Config: - json_encoders = { - timedelta: timedelta_isoformat - } - - -m = ChildClassWithEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) -print(m.model_dump_json()) diff --git a/docs/examples/exporting_models_json_forward_ref.py b/docs/examples/exporting_models_json_forward_ref.py deleted file mode 100644 index d413324d2f..0000000000 --- a/docs/examples/exporting_models_json_forward_ref.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import List, Optional - -from pydantic import BaseModel - - -class Address(BaseModel): - city: str - country: str - - -class User(BaseModel): - name: str - address: Address - friends: Optional[List['User']] = None - - class Config: - json_encoders = { - Address: lambda a: f'{a.city} ({a.country})', - 'User': lambda u: f'{u.name} in {u.address.city} ' - f'({u.address.country[:2].upper()})', - } - - -User.update_forward_refs() - -wolfgang = User( - name='Wolfgang', - address=Address(city='Berlin', country='Deutschland'), - friends=[ - User(name='Pierre', address=Address(city='Paris', country='France')), - User(name='John', address=Address(city='London', country='UK')), - ], -) -print(wolfgang.model_dump_json(models_as_dict=False)) diff --git a/docs/examples/exporting_models_json_subclass.py b/docs/examples/exporting_models_json_subclass.py deleted file mode 100644 index e580bbe0d2..0000000000 --- a/docs/examples/exporting_models_json_subclass.py +++ /dev/null @@ -1,27 +0,0 @@ -from datetime import date, timedelta -from pydantic import BaseModel -from pydantic.validators import int_validator - - -class DayThisYear(date): - """ - Contrived example of a special type of date that - takes an int and interprets it as a day in the current year - """ - - @classmethod - def __get_validators__(cls): - yield int_validator - yield cls.validate - - @classmethod - def validate(cls, v: int): - return date.today().replace(month=1, day=1) + timedelta(days=v) - - -class FooModel(BaseModel): - date: DayThisYear - - -m = FooModel(date=300) -print(m.model_dump_json()) diff --git a/docs/examples/exporting_models_orjson.py b/docs/examples/exporting_models_orjson.py deleted file mode 100644 index 262a540f45..0000000000 --- a/docs/examples/exporting_models_orjson.py +++ /dev/null @@ -1,22 +0,0 @@ -from datetime import datetime -import orjson -from pydantic import BaseModel - - -def orjson_dumps(v, *, default): - # orjson.dumps returns bytes, to match standard json.dumps we need to decode - return orjson.dumps(v, default=default).decode() - - -class User(BaseModel): - id: int - name = 'John Doe' - signup_ts: datetime = None - - class Config: - json_loads = orjson.loads - json_dumps = orjson_dumps - - -user = User.parse_raw('{"id":123,"signup_ts":1234567890,"name":"John Doe"}') -print(user.model_dump_json()) diff --git a/docs/examples/exporting_models_pickle.py b/docs/examples/exporting_models_pickle.py deleted file mode 100644 index 72d4c637ed..0000000000 --- a/docs/examples/exporting_models_pickle.py +++ /dev/null @@ -1,15 +0,0 @@ -import pickle -from pydantic import BaseModel - - -class FooBarModel(BaseModel): - a: str - b: int - - -m = FooBarModel(a='hello', b=123) -print(m) -data = pickle.dumps(m) -print(data) -m2 = pickle.loads(data) -print(m2) diff --git a/docs/examples/exporting_models_ujson.py b/docs/examples/exporting_models_ujson.py deleted file mode 100644 index 547295bcef..0000000000 --- a/docs/examples/exporting_models_ujson.py +++ /dev/null @@ -1,16 +0,0 @@ -from datetime import datetime -import ujson -from pydantic import BaseModel - - -class User(BaseModel): - id: int - name = 'John Doe' - signup_ts: datetime = None - - class Config: - json_loads = ujson.loads - - -user = User.parse_raw('{"id": 123,"signup_ts":1234567890,"name":"John Doe"}') -print(user) diff --git a/docs/examples/generate_models_person_model.py b/docs/examples/generate_models_person_model.py deleted file mode 100644 index a1961ba243..0000000000 --- a/docs/examples/generate_models_person_model.py +++ /dev/null @@ -1,20 +0,0 @@ -# dont-upgrade -# generated by datamodel-codegen: -# filename: person.json -# timestamp: 2020-05-19T15:07:31+00:00 -from __future__ import annotations -from typing import Any, List, Optional -from pydantic import BaseModel, Field, conint - - -class Pet(BaseModel): - name: Optional[str] = None - age: Optional[int] = None - - -class Person(BaseModel): - first_name: str = Field(..., description="The person's first name.") - last_name: str = Field(..., description="The person's last name.") - age: Optional[conint(ge=0)] = Field(None, description='Age in years.') - pets: Optional[List[Pet]] = None - comment: Optional[Any] = None diff --git a/docs/examples/hypothesis_property_based_test.py b/docs/examples/hypothesis_property_based_test.py deleted file mode 100644 index 00eead5bc9..0000000000 --- a/docs/examples/hypothesis_property_based_test.py +++ /dev/null @@ -1,24 +0,0 @@ -import typing -from hypothesis import given, strategies as st -from pydantic import BaseModel, EmailStr, PaymentCardNumber, PositiveFloat - - -class Model(BaseModel): - card: PaymentCardNumber - price: PositiveFloat - users: typing.List[EmailStr] - - -@given(st.builds(Model)) -def test_property(instance): - # Hypothesis calls this test function many times with varied Models, - # so you can write a test that should pass given *any* instance. - assert 0 < instance.price - assert all('@' in email for email in instance.users) - - -@given(st.builds(Model, price=st.floats(100, 200))) -def test_with_discount(instance): - # This test shows how you can override specific fields, - # and let Hypothesis fill in any you don't care about. - assert 100 <= instance.price <= 200 diff --git a/docs/examples/index_error.py b/docs/examples/index_error.py deleted file mode 100644 index 16887d04bf..0000000000 --- a/docs/examples/index_error.py +++ /dev/null @@ -1,11 +0,0 @@ -# output-json -from index_main import User - -# ignore-above -from pydantic import ValidationError - -try: - User(signup_ts='broken', friends=[1, 2, 'not number']) -except ValidationError as e: - print(e.json()) -# requires: User from previous example diff --git a/docs/examples/index_main.py b/docs/examples/index_main.py deleted file mode 100644 index a41526705b..0000000000 --- a/docs/examples/index_main.py +++ /dev/null @@ -1,22 +0,0 @@ -from datetime import datetime -from typing import List, Optional -from pydantic import BaseModel - - -class User(BaseModel): - id: int - name = 'John Doe' - signup_ts: Optional[datetime] = None - friends: List[int] = [] - - -external_data = { - 'id': '123', - 'signup_ts': '2019-06-01 12:22', - 'friends': [1, 2, '3'], -} -user = User(**external_data) -print(user.id) -print(repr(user.signup_ts)) -print(user.friends) -print(user.model_dump()) diff --git a/docs/examples/model_config_alias_generator.py b/docs/examples/model_config_alias_generator.py deleted file mode 100644 index f8d5c3d13c..0000000000 --- a/docs/examples/model_config_alias_generator.py +++ /dev/null @@ -1,18 +0,0 @@ -from pydantic import BaseModel - - -def to_camel(string: str) -> str: - return ''.join(word.capitalize() for word in string.split('_')) - - -class Voice(BaseModel): - name: str - language_code: str - - class Config: - alias_generator = to_camel - - -voice = Voice(Name='Filiz', LanguageCode='tr-TR') -print(voice.language_code) -print(voice.model_dump(by_alias=True)) diff --git a/docs/examples/model_config_alias_precedence.py b/docs/examples/model_config_alias_precedence.py deleted file mode 100644 index d223f7129b..0000000000 --- a/docs/examples/model_config_alias_precedence.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic import BaseModel, Field - - -class Voice(BaseModel): - name: str = Field(None, alias='ActorName') - language_code: str = None - mood: str = None - - -class Character(Voice): - act: int = 1 - - class Config: - fields = {'language_code': 'lang'} - - @classmethod - def alias_generator(cls, string: str) -> str: - # this is the same as `alias_generator = to_camel` above - return ''.join(word.capitalize() for word in string.split('_')) - - -print(Character.model_json_schema(by_alias=True)) diff --git a/docs/examples/model_config_change_globally_custom.py b/docs/examples/model_config_change_globally_custom.py deleted file mode 100644 index 5cf1011e99..0000000000 --- a/docs/examples/model_config_change_globally_custom.py +++ /dev/null @@ -1,14 +0,0 @@ -from pydantic import BaseModel as PydanticBaseModel - - -class BaseModel(PydanticBaseModel): - class Config: - arbitrary_types_allowed = True - - -class MyClass: - """A random class""" - - -class Model(BaseModel): - x: MyClass diff --git a/docs/examples/model_config_class_kwargs.py b/docs/examples/model_config_class_kwargs.py deleted file mode 100644 index 3a1841c57b..0000000000 --- a/docs/examples/model_config_class_kwargs.py +++ /dev/null @@ -1,11 +0,0 @@ -from pydantic import BaseModel, ValidationError, Extra - - -class Model(BaseModel, extra=Extra.forbid): - a: str - - -try: - Model(a='spam', b='oh no') -except ValidationError as e: - print(e) diff --git a/docs/examples/model_config_dataclass.py b/docs/examples/model_config_dataclass.py deleted file mode 100644 index bb6bd2e561..0000000000 --- a/docs/examples/model_config_dataclass.py +++ /dev/null @@ -1,23 +0,0 @@ -from datetime import datetime - -from pydantic import ValidationError -from pydantic.dataclasses import dataclass - - -class MyConfig: - str_max_length = 10 - validate_assignment = True - - -@dataclass(config=MyConfig) -class User: - id: int - name: str = 'John Doe' - signup_ts: datetime = None - - -user = User(id='42', signup_ts='2032-06-21T12:00') -try: - user.name = 'x' * 20 -except ValidationError as e: - print(e) diff --git a/docs/examples/model_config_main.py b/docs/examples/model_config_main.py deleted file mode 100644 index ff03cdeb4a..0000000000 --- a/docs/examples/model_config_main.py +++ /dev/null @@ -1,12 +0,0 @@ -from pydantic import ConfigDict, BaseModel, ValidationError - - -class Model(BaseModel): - model_config = ConfigDict(str_max_length=10) - v: str - - -try: - m = Model(v='x' * 20) -except ValidationError as e: - print(e) diff --git a/docs/examples/model_config_smart_union_off.py b/docs/examples/model_config_smart_union_off.py deleted file mode 100644 index f31ead0f2e..0000000000 --- a/docs/examples/model_config_smart_union_off.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union - -from pydantic import BaseModel - - -class Foo(BaseModel): - pass - - -class Bar(BaseModel): - pass - - -class Model(BaseModel): - x: Union[str, int] - y: Union[Foo, Bar] - - -print(Model(x=1, y=Bar())) diff --git a/docs/examples/model_config_smart_union_on.py b/docs/examples/model_config_smart_union_on.py deleted file mode 100644 index 91f7ac32ce..0000000000 --- a/docs/examples/model_config_smart_union_on.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Union - -from pydantic import BaseModel - - -class Foo(BaseModel): - pass - - -class Bar(BaseModel): - pass - - -class Model(BaseModel): - x: Union[str, int] - y: Union[Foo, Bar] - - class Config: - smart_union = True - - -print(Model(x=1, y=Bar())) diff --git a/docs/examples/model_config_smart_union_on_edge_case.py b/docs/examples/model_config_smart_union_on_edge_case.py deleted file mode 100644 index 1a2c6c2e39..0000000000 --- a/docs/examples/model_config_smart_union_on_edge_case.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import List, Union - -from pydantic import BaseModel - - -class Model(BaseModel, smart_union=True): - x: Union[List[str], List[int]] - - -# Expected coercion -print(Model(x=[1, '2'])) - -# Unexpected coercion -print(Model(x=[1, 2])) diff --git a/docs/examples/models_abc.py b/docs/examples/models_abc.py deleted file mode 100644 index f32519931f..0000000000 --- a/docs/examples/models_abc.py +++ /dev/null @@ -1,11 +0,0 @@ -import abc -from pydantic import BaseModel - - -class FooBarModel(BaseModel, abc.ABC): - a: str - b: int - - @abc.abstractmethod - def my_abstract_method(self): - pass diff --git a/docs/examples/models_construct.py b/docs/examples/models_construct.py deleted file mode 100644 index 3586ceae27..0000000000 --- a/docs/examples/models_construct.py +++ /dev/null @@ -1,29 +0,0 @@ -from pydantic import BaseModel - - -class User(BaseModel): - id: int - age: int - name: str = 'John Doe' - - -original_user = User(id=123, age=32) - -user_data = original_user.model_dump() -print(user_data) -fields_set = original_user.__fields_set__ -print(fields_set) - -# ... -# pass user_data and fields_set to RPC or save to the database etc. -# ... - -# you can then create a new instance of User without -# re-running validation which would be unnecessary at this point: -new_user = User.model_construct(_fields_set=fields_set, **user_data) -print(repr(new_user)) -print(new_user.__fields_set__) - -# construct can be dangerous, only use it with validated data!: -bad_user = User.model_construct(id='dog') -print(repr(bad_user)) diff --git a/docs/examples/models_custom_root_access.py b/docs/examples/models_custom_root_access.py deleted file mode 100644 index 569422f7b6..0000000000 --- a/docs/examples/models_custom_root_access.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import List -from pydantic import BaseModel - - -class Pets(BaseModel): - __root__: List[str] - - def __iter__(self): - return iter(self.__root__) - - def __getitem__(self, item): - return self.__root__[item] - - -pets = Pets.model_validate(['dog', 'cat']) -print(pets[0]) -print([pet for pet in pets]) diff --git a/docs/examples/models_custom_root_field.py b/docs/examples/models_custom_root_field.py deleted file mode 100644 index cafee32062..0000000000 --- a/docs/examples/models_custom_root_field.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import List -import json -from pydantic import BaseModel -from pydantic.json_schema import schema - - -class Pets(BaseModel): - __root__: List[str] - - -print(Pets(__root__=['dog', 'cat'])) -print(Pets(__root__=['dog', 'cat']).model_dump_json()) -print(Pets.model_validate(['dog', 'cat'])) -print(Pets.model_json_schema()) -pets_schema = schema([Pets]) -print(json.dumps(pets_schema, indent=2)) diff --git a/docs/examples/models_custom_root_field_parse_obj.py b/docs/examples/models_custom_root_field_parse_obj.py deleted file mode 100644 index 5bd6610018..0000000000 --- a/docs/examples/models_custom_root_field_parse_obj.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import List, Dict -from pydantic import BaseModel, ValidationError - - -class Pets(BaseModel): - __root__: List[str] - - -print(Pets.model_validate(['dog', 'cat'])) -print(Pets.model_validate({'__root__': ['dog', 'cat']})) # not recommended - - -class PetsByName(BaseModel): - __root__: Dict[str, str] - - -print(PetsByName.model_validate({'Otis': 'dog', 'Milo': 'cat'})) -try: - PetsByName.model_validate({'__root__': {'Otis': 'dog', 'Milo': 'cat'}}) -except ValidationError as e: - print(e) diff --git a/docs/examples/models_data_conversion.py b/docs/examples/models_data_conversion.py deleted file mode 100644 index 27425645ee..0000000000 --- a/docs/examples/models_data_conversion.py +++ /dev/null @@ -1,10 +0,0 @@ -from pydantic import BaseModel - - -class Model(BaseModel): - a: int - b: float - c: str - - -print(Model(a=3.1415, b=' 2.72 ', c=123).model_dump()) diff --git a/docs/examples/models_default_factory.py b/docs/examples/models_default_factory.py deleted file mode 100644 index 44b247daa5..0000000000 --- a/docs/examples/models_default_factory.py +++ /dev/null @@ -1,14 +0,0 @@ -from datetime import datetime -from uuid import UUID, uuid4 -from pydantic import BaseModel, Field - - -class Model(BaseModel): - uid: UUID = Field(default_factory=uuid4) - updated: datetime = Field(default_factory=datetime.utcnow) - - -m1 = Model() -m2 = Model() -print(f'{m1.uid} != {m2.uid}') -print(f'{m1.updated} != {m2.updated}') diff --git a/docs/examples/models_dynamic_creation.py b/docs/examples/models_dynamic_creation.py deleted file mode 100644 index 7e5d5c3c5f..0000000000 --- a/docs/examples/models_dynamic_creation.py +++ /dev/null @@ -1,8 +0,0 @@ -from pydantic import BaseModel, create_model - -DynamicFoobarModel = create_model('DynamicFoobarModel', foo=(str, ...), bar=123) - - -class StaticFoobarModel(BaseModel): - foo: str - bar: int = 123 diff --git a/docs/examples/models_dynamic_inheritance.py b/docs/examples/models_dynamic_inheritance.py deleted file mode 100644 index 0d3639301c..0000000000 --- a/docs/examples/models_dynamic_inheritance.py +++ /dev/null @@ -1,16 +0,0 @@ -from pydantic import BaseModel, create_model - - -class FooModel(BaseModel): - foo: str - bar: int = 123 - - -BarModel = create_model( - 'BarModel', - apple='russet', - banana='yellow', - __base__=FooModel, -) -print(BarModel) -print(BarModel.model_fields.keys()) diff --git a/docs/examples/models_dynamic_validators.py b/docs/examples/models_dynamic_validators.py deleted file mode 100644 index 132624cab5..0000000000 --- a/docs/examples/models_dynamic_validators.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import create_model, ValidationError, validator - - -def username_alphanumeric(cls, v): - assert v.isalnum(), 'must be alphanumeric' - return v - - -validators = { - 'username_validator': - validator('username')(username_alphanumeric) -} - -UserModel = create_model( - 'UserModel', - username=(str, ...), - __validators__=validators -) - -user = UserModel(username='scolvin') -print(user) - -try: - UserModel(username='scolvi%n') -except ValidationError as e: - print(e) diff --git a/docs/examples/models_errors1.py b/docs/examples/models_errors1.py deleted file mode 100644 index ea9ebc990d..0000000000 --- a/docs/examples/models_errors1.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import List -from pydantic import BaseModel, ValidationError, conint - - -class Location(BaseModel): - lat = 0.1 - lng = 10.1 - - -class Model(BaseModel): - is_required: float - gt_int: conint(gt=42) - list_of_ints: List[int] = None - a_float: float = None - recursive_model: Location = None - - -data = dict( - list_of_ints=['1', 2, 'bad'], - a_float='not a float', - recursive_model={'lat': 4.2, 'lng': 'New York'}, - gt_int=21, -) - -try: - Model(**data) -except ValidationError as e: - print(e) - -try: - Model(**data) -except ValidationError as e: - print(e.json()) diff --git a/docs/examples/models_errors2.py b/docs/examples/models_errors2.py deleted file mode 100644 index 7aa1b15690..0000000000 --- a/docs/examples/models_errors2.py +++ /dev/null @@ -1,18 +0,0 @@ -from pydantic import BaseModel, ValidationError, validator - - -class Model(BaseModel): - foo: str - - @validator('foo') - def value_must_equal_bar(cls, v): - if v != 'bar': - raise ValueError('value must be "bar"') - - return v - - -try: - Model(foo='ber') -except ValidationError as e: - print(e.errors()) diff --git a/docs/examples/models_errors3.py b/docs/examples/models_errors3.py deleted file mode 100644 index 32a34d65ec..0000000000 --- a/docs/examples/models_errors3.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic import BaseModel, PydanticValueError, ValidationError, validator - - -class NotABarError(PydanticValueError): - code = 'not_a_bar' - msg_template = 'value is not "bar", got "{wrong_value}"' - - -class Model(BaseModel): - foo: str - - @validator('foo') - def value_must_equal_bar(cls, v): - if v != 'bar': - raise NotABarError(wrong_value=v) - return v - - -try: - Model(foo='ber') -except ValidationError as e: - print(e.json()) diff --git a/docs/examples/models_field_order.py b/docs/examples/models_field_order.py deleted file mode 100644 index 083d1ec5d4..0000000000 --- a/docs/examples/models_field_order.py +++ /dev/null @@ -1,20 +0,0 @@ -from pydantic import BaseModel, ValidationError - - -class Model(BaseModel): - a: int - b = 2 - c: int = 1 - d = 0 - e: float - - -print(Model.model_fields.keys()) -m = Model(e=2, a=1) -print(m.model_dump()) -try: - Model(a='x', b='x', c='x', d='x', e='x') -except ValidationError as err: - error_locations = [e['loc'] for e in err.errors()] - -print(error_locations) diff --git a/docs/examples/models_from_typeddict.py b/docs/examples/models_from_typeddict.py deleted file mode 100644 index 2454db70fb..0000000000 --- a/docs/examples/models_from_typeddict.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing_extensions import TypedDict - -from pydantic import ValidationError, create_model_from_typeddict - - -class User(TypedDict): - name: str - id: int - - -class Config: - extra = 'forbid' - - -UserM = create_model_from_typeddict(User, __config__=Config) -print(repr(UserM(name=123, id='3'))) - -try: - UserM(name=123, id='3', other='no') -except ValidationError as e: - print(e) diff --git a/docs/examples/models_generics.py b/docs/examples/models_generics.py deleted file mode 100644 index 6aee111bdb..0000000000 --- a/docs/examples/models_generics.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Generic, TypeVar, Optional, List - -from pydantic import BaseModel, validator, ValidationError - -DataT = TypeVar('DataT') - - -class Error(BaseModel): - code: int - message: str - - -class DataModel(BaseModel): - numbers: List[int] - people: List[str] - - -class Response(BaseModel, Generic[DataT]): - data: Optional[DataT] - error: Optional[Error] - - @validator('error', always=True) - def check_consistency(cls, v, values): - if v is not None and values['data'] is not None: - raise ValueError('must not provide both data and error') - if v is None and values.get('data') is None: - raise ValueError('must provide data or error') - return v - - -data = DataModel(numbers=[1, 2, 3], people=[]) -error = Error(code=404, message='Not found') - -print(Response[int](data=1)) -print(Response[str](data='value')) -print(Response[str](data='value').model_dump()) -print(Response[DataModel](data=data).model_dump()) -print(Response[DataModel](error=error).model_dump()) -try: - Response[int](data='value') -except ValidationError as e: - print(e) diff --git a/docs/examples/models_generics_inheritance.py b/docs/examples/models_generics_inheritance.py deleted file mode 100644 index 04268ceab3..0000000000 --- a/docs/examples/models_generics_inheritance.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import TypeVar, Generic -from pydantic import BaseModel - -TypeX = TypeVar('TypeX') - - -class BaseClass(BaseModel, Generic[TypeX]): - X: TypeX - - -class ChildClass(BaseClass[TypeX], Generic[TypeX]): - # Inherit from Generic[TypeX] - pass - - -# Replace TypeX by int -print(ChildClass[int](X=1)) diff --git a/docs/examples/models_generics_inheritance_extend.py b/docs/examples/models_generics_inheritance_extend.py deleted file mode 100644 index 201cced1f6..0000000000 --- a/docs/examples/models_generics_inheritance_extend.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import TypeVar, Generic -from pydantic import BaseModel - -TypeX = TypeVar('TypeX') -TypeY = TypeVar('TypeY') -TypeZ = TypeVar('TypeZ') - - -class BaseClass(BaseModel, Generic[TypeX, TypeY]): - x: TypeX - y: TypeY - - -class ChildClass(BaseClass[int, TypeY], Generic[TypeY, TypeZ]): - z: TypeZ - - -# Replace TypeY by str -print(ChildClass[str, int](x=1, y='y', z=3)) diff --git a/docs/examples/models_generics_naming.py b/docs/examples/models_generics_naming.py deleted file mode 100644 index 493a47192d..0000000000 --- a/docs/examples/models_generics_naming.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Generic, TypeVar, Type, Any, Tuple - -from pydantic import BaseModel - -DataT = TypeVar('DataT') - - -class Response(BaseModel, Generic[DataT]): - data: DataT - - @classmethod - def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: - return f'{params[0].__name__.title()}Response' - - -print(repr(Response[int](data=1))) -print(repr(Response[str](data='a'))) diff --git a/docs/examples/models_generics_nested.py b/docs/examples/models_generics_nested.py deleted file mode 100644 index fec3919cbe..0000000000 --- a/docs/examples/models_generics_nested.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Generic, TypeVar - -from pydantic import BaseModel, ValidationError - -T = TypeVar('T') - - -class InnerT(BaseModel, Generic[T]): - inner: T - - -class OuterT(BaseModel, Generic[T]): - outer: T - nested: InnerT[T] - - -nested = InnerT[int](inner=1) -print(OuterT[int](outer=1, nested=nested)) -try: - nested = InnerT[str](inner='a') - print(OuterT[int](outer='a', nested=nested)) -except ValidationError as e: - print(e) diff --git a/docs/examples/models_generics_typevars.py b/docs/examples/models_generics_typevars.py deleted file mode 100644 index ce83dc8bcb..0000000000 --- a/docs/examples/models_generics_typevars.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Generic, TypeVar - -from pydantic import BaseModel, ValidationError - -AT = TypeVar('AT') -BT = TypeVar('BT') - - -class Model(BaseModel, Generic[AT, BT]): - a: AT - b: BT - - -print(Model(a='a', b='a')) - -IntT = TypeVar('IntT', bound=int) -typevar_model = Model[int, IntT] -print(typevar_model(a=1, b=1)) -try: - typevar_model(a='a', b='a') -except ValidationError as exc: - print(exc) - -concrete_model = typevar_model[int] -print(concrete_model(a=1, b=1)) diff --git a/docs/examples/models_mutation.py b/docs/examples/models_mutation.py deleted file mode 100644 index bfc4587540..0000000000 --- a/docs/examples/models_mutation.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic import BaseModel - - -class FooBarModel(BaseModel): - a: str - b: dict - - class Config: - allow_mutation = False - - -foobar = FooBarModel(a='hello', b={'apple': 'pear'}) - -try: - foobar.a = 'different' -except TypeError as e: - print(e) - -print(foobar.a) -print(foobar.b) -foobar.b['apple'] = 'grape' -print(foobar.b) diff --git a/docs/examples/models_orm_mode.py b/docs/examples/models_orm_mode.py deleted file mode 100644 index 75fcf89284..0000000000 --- a/docs/examples/models_orm_mode.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -from sqlalchemy import Column, Integer, String -from sqlalchemy.dialects.postgresql import ARRAY -from sqlalchemy.ext.declarative import declarative_base -from pydantic import BaseModel, constr - -Base = declarative_base() - - -class CompanyOrm(Base): - __tablename__ = 'companies' - id = Column(Integer, primary_key=True, nullable=False) - public_key = Column(String(20), index=True, nullable=False, unique=True) - name = Column(String(63), unique=True) - domains = Column(ARRAY(String(255))) - - -class CompanyModel(BaseModel): - id: int - public_key: constr(max_length=20) - name: constr(max_length=63) - domains: List[constr(max_length=255)] - - class Config: - from_attributes = True - - -co_orm = CompanyOrm( - id=123, - public_key='foobar', - name='Testing', - domains=['example.com', 'foobar.com'], -) -print(co_orm) -co_model = CompanyModel.from_orm(co_orm) -print(co_model) diff --git a/docs/examples/models_orm_mode_data_binding.py b/docs/examples/models_orm_mode_data_binding.py deleted file mode 100644 index 6a4125de82..0000000000 --- a/docs/examples/models_orm_mode_data_binding.py +++ /dev/null @@ -1,43 +0,0 @@ -from pydantic import BaseModel -from typing import Any, Optional -from pydantic.utils import GetterDict -from xml.etree.ElementTree import fromstring - - -xmlstring = """ - - - - -""" - - -class UserGetter(GetterDict): - - def get(self, key: str, default: Any) -> Any: - - # element attributes - if key in {'Id', 'Status'}: - return self._obj.attrib.get(key, default) - - # element children - else: - try: - return self._obj.find(key).attrib['Value'] - except (AttributeError, KeyError): - return default - - -class User(BaseModel): - Id: int - Status: Optional[str] - FirstName: Optional[str] - LastName: Optional[str] - LoggedIn: bool - - class Config: - from_attributes = True - getter_dict = UserGetter - - -user = User.from_orm(fromstring(xmlstring)) diff --git a/docs/examples/models_orm_mode_recursive.py b/docs/examples/models_orm_mode_recursive.py deleted file mode 100644 index ef771f38ae..0000000000 --- a/docs/examples/models_orm_mode_recursive.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import List -from pydantic import BaseModel - - -class PetCls: - def __init__(self, *, name: str, species: str): - self.name = name - self.species = species - - -class PersonCls: - def __init__(self, *, name: str, age: float = None, pets: List[PetCls]): - self.name = name - self.age = age - self.pets = pets - - -class Pet(BaseModel): - name: str - species: str - - class Config: - from_attributes = True - - -class Person(BaseModel): - name: str - age: float = None - pets: List[Pet] - - class Config: - from_attributes = True - - -bones = PetCls(name='Bones', species='dog') -orion = PetCls(name='Orion', species='cat') -anna = PersonCls(name='Anna', age=20, pets=[bones, orion]) -anna_model = Person.from_orm(anna) -print(anna_model) diff --git a/docs/examples/models_orm_mode_reserved_name.py b/docs/examples/models_orm_mode_reserved_name.py deleted file mode 100644 index 26ede718a2..0000000000 --- a/docs/examples/models_orm_mode_reserved_name.py +++ /dev/null @@ -1,30 +0,0 @@ -import typing - -from pydantic import BaseModel, Field -import sqlalchemy as sa -from sqlalchemy.ext.declarative import declarative_base - - -class MyModel(BaseModel): - metadata: typing.Dict[str, str] = Field(alias='metadata_') - - class Config: - from_attributes = True - - -Base = declarative_base() - - -class SQLModel(Base): - __tablename__ = 'my_table' - id = sa.Column('id', sa.Integer, primary_key=True) - # 'metadata' is reserved by SQLAlchemy, hence the '_' - metadata_ = sa.Column('metadata', sa.JSON) - - -sql_model = SQLModel(metadata_={'key': 'val'}, id=1) - -pydantic_model = MyModel.from_orm(sql_model) - -print(pydantic_model.model_dump()) -print(pydantic_model.model_dump(by_alias=True)) diff --git a/docs/examples/models_parse.py b/docs/examples/models_parse.py deleted file mode 100644 index 66ddce7e62..0000000000 --- a/docs/examples/models_parse.py +++ /dev/null @@ -1,42 +0,0 @@ -import pickle -from datetime import datetime -from pathlib import Path - -from pydantic import BaseModel, ValidationError - - -class User(BaseModel): - id: int - name = 'John Doe' - signup_ts: datetime = None - - -m = User.model_validate({'id': 123, 'name': 'James'}) -print(m) - -try: - User.model_validate(['not', 'a', 'dict']) -except ValidationError as e: - print(e) - -# assumes json as no content type passed -m = User.parse_raw('{"id": 123, "name": "James"}') -print(m) - -pickle_data = pickle.dumps({ - 'id': 123, - 'name': 'James', - 'signup_ts': datetime(2017, 7, 14) -}) -m = User.parse_raw( - pickle_data, content_type='application/pickle', allow_pickle=True -) -print(m) - -path = Path('data.json') -path.write_text('{"id": 123, "name": "James"}') -m = User.parse_file(path) -print(m) -# ignore-below -if path.exists(): - path.unlink() diff --git a/docs/examples/models_recursive.py b/docs/examples/models_recursive.py deleted file mode 100644 index aba692d59f..0000000000 --- a/docs/examples/models_recursive.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import List, Optional -from pydantic import BaseModel - - -class Foo(BaseModel): - count: int - size: Optional[float] = None - - -class Bar(BaseModel): - apple = 'x' - banana = 'y' - - -class Spam(BaseModel): - foo: Foo - bars: List[Bar] - - -m = Spam(foo={'count': 4}, bars=[{'apple': 'x1'}, {'apple': 'x2'}]) -print(m) -print(m.model_dump()) diff --git a/docs/examples/models_required_field_optional.py b/docs/examples/models_required_field_optional.py deleted file mode 100644 index 562abf38c5..0000000000 --- a/docs/examples/models_required_field_optional.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Optional -from pydantic import BaseModel, Field, ValidationError - - -class Model(BaseModel): - a: Optional[int] - b: Optional[int] = ... - c: Optional[int] = Field(...) - - -print(Model(b=1, c=2)) -try: - Model(a=1, b=2) -except ValidationError as e: - print(e) diff --git a/docs/examples/models_required_fields.py b/docs/examples/models_required_fields.py deleted file mode 100644 index 605a2a08c1..0000000000 --- a/docs/examples/models_required_fields.py +++ /dev/null @@ -1,7 +0,0 @@ -from pydantic import BaseModel, Field - - -class Model(BaseModel): - a: int - b: int = ... - c: int = Field(...) diff --git a/docs/examples/models_signature.py b/docs/examples/models_signature.py deleted file mode 100644 index 6c0aac03b2..0000000000 --- a/docs/examples/models_signature.py +++ /dev/null @@ -1,12 +0,0 @@ -import inspect -from pydantic import BaseModel, Field - - -class FooModel(BaseModel): - id: int - name: str = None - description: str = 'Foo' - apple: int = Field(..., alias='pear') - - -print(inspect.signature(FooModel)) diff --git a/docs/examples/models_signature_custom_init.py b/docs/examples/models_signature_custom_init.py deleted file mode 100644 index 7da1fc50cc..0000000000 --- a/docs/examples/models_signature_custom_init.py +++ /dev/null @@ -1,15 +0,0 @@ -import inspect - -from pydantic import BaseModel - - -class MyModel(BaseModel): - id: int - info: str = 'Foo' - - def __init__(self, id: int = 1, *, bar: str, **data) -> None: - """My custom init!""" - super().__init__(id=id, bar=bar, **data) - - -print(inspect.signature(MyModel)) diff --git a/docs/examples/models_structural_pattern_matching.py b/docs/examples/models_structural_pattern_matching.py deleted file mode 100644 index cbc8c27226..0000000000 --- a/docs/examples/models_structural_pattern_matching.py +++ /dev/null @@ -1,18 +0,0 @@ -# requires python3.10 -from pydantic import BaseModel - - -class Pet(BaseModel): - name: str - species: str - - -a = Pet(name='Bones', species='dog') - -match a: - # match `species` to 'dog', declare and initialize `dog_name` - case Pet(species='dog', name=dog_name): - print(f'{dog_name} is a dog') - # default case - case _: - print('No dog matched') diff --git a/docs/examples/mypy_main.py b/docs/examples/mypy_main.py deleted file mode 100644 index 8b95e39723..0000000000 --- a/docs/examples/mypy_main.py +++ /dev/null @@ -1,17 +0,0 @@ -# dont-execute -from datetime import datetime -from typing import List, Optional -from pydantic import BaseModel, NoneStr - - -class Model(BaseModel): - age: int - first_name = 'John' - last_name: NoneStr = None - signup_ts: Optional[datetime] = None - list_of_ints: List[int] - - -m = Model(age=42, list_of_ints=[1, '2', b'3']) -print(m.middle_name) # not a model field! -Model() # will raise a validation error for age and list_of_ints diff --git a/docs/examples/parse_obj_as.py b/docs/examples/parse_obj_as.py deleted file mode 100644 index 6f70d74b33..0000000000 --- a/docs/examples/parse_obj_as.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import List - -from pydantic import BaseModel, parse_obj_as - - -class Item(BaseModel): - id: int - name: str - - -# `item_data` could come from an API call, eg., via something like: -# item_data = requests.get('https://my-api.com/items').json() -item_data = [{'id': 1, 'name': 'My Item'}] - -items = parse_obj_as(List[Item], item_data) -print(items) diff --git a/docs/examples/postponed_annotations_broken.py b/docs/examples/postponed_annotations_broken.py deleted file mode 100644 index b24873fc05..0000000000 --- a/docs/examples/postponed_annotations_broken.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import annotations -from pydantic import BaseModel -from pydantic.errors import PydanticUserError - - -def this_is_broken(): - from pydantic import HttpUrl # HttpUrl is defined in function local scope - - class Model(BaseModel): - a: HttpUrl - - try: - Model(a='https://example.com') - except PydanticUserError as e: - print(e) - - try: - Model.update_forward_refs() - except NameError as e: - print(e) - - -this_is_broken() diff --git a/docs/examples/postponed_annotations_forward_ref.py b/docs/examples/postponed_annotations_forward_ref.py deleted file mode 100644 index 3e3377f328..0000000000 --- a/docs/examples/postponed_annotations_forward_ref.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import ForwardRef -from pydantic import BaseModel - -Foo = ForwardRef('Foo') - - -class Foo(BaseModel): - a: int = 123 - b: Foo = None - - -Foo.update_forward_refs() - -print(Foo()) -print(Foo(b={'a': '321'})) diff --git a/docs/examples/postponed_annotations_main.py b/docs/examples/postponed_annotations_main.py deleted file mode 100644 index 50d524c11a..0000000000 --- a/docs/examples/postponed_annotations_main.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import annotations -from typing import Any, List -from pydantic import BaseModel - - -class Model(BaseModel): - a: List[int] - b: Any - - -print(Model(a=('1', 2, 3), b='ok')) diff --git a/docs/examples/postponed_annotations_self_referencing_annotations.py b/docs/examples/postponed_annotations_self_referencing_annotations.py deleted file mode 100644 index aa8966c1c2..0000000000 --- a/docs/examples/postponed_annotations_self_referencing_annotations.py +++ /dev/null @@ -1,12 +0,0 @@ -from __future__ import annotations -from pydantic import BaseModel - - -class Foo(BaseModel): - a: int = 123 - #: The sibling of `Foo` is referenced directly by type - sibling: Foo = None - - -print(Foo()) -print(Foo(sibling={'a': '321'})) diff --git a/docs/examples/postponed_annotations_self_referencing_string.py b/docs/examples/postponed_annotations_self_referencing_string.py deleted file mode 100644 index 3290051bb5..0000000000 --- a/docs/examples/postponed_annotations_self_referencing_string.py +++ /dev/null @@ -1,11 +0,0 @@ -from pydantic import BaseModel - - -class Foo(BaseModel): - a: int = 123 - #: The sibling of `Foo` is referenced by string - sibling: 'Foo' = None - - -print(Foo()) -print(Foo(sibling={'a': '321'})) diff --git a/docs/examples/postponed_annotations_works.py b/docs/examples/postponed_annotations_works.py deleted file mode 100644 index 7a88631450..0000000000 --- a/docs/examples/postponed_annotations_works.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import annotations -from pydantic import BaseModel -from pydantic import HttpUrl # HttpUrl is defined in the module's global scope - - -def this_works(): - class Model(BaseModel): - a: HttpUrl - - print(Model(a='https://example.com')) - - -this_works() diff --git a/docs/examples/private_attributes.py b/docs/examples/private_attributes.py deleted file mode 100644 index 51da4456e9..0000000000 --- a/docs/examples/private_attributes.py +++ /dev/null @@ -1,19 +0,0 @@ -from datetime import datetime -from random import randint - -from pydantic import BaseModel, PrivateAttr - - -class TimeAwareModel(BaseModel): - _processed_at: datetime = PrivateAttr(default_factory=datetime.now) - _secret_value: str = PrivateAttr() - - def __init__(self, **data): - super().__init__(**data) - # this could also be done with default_factory - self._secret_value = randint(1, 5) - - -m = TimeAwareModel() -print(m._processed_at) -print(m._secret_value) diff --git a/docs/examples/private_attributes_underscore_attrs_are_private.py b/docs/examples/private_attributes_underscore_attrs_are_private.py deleted file mode 100644 index f24db5f1c5..0000000000 --- a/docs/examples/private_attributes_underscore_attrs_are_private.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import ClassVar - -from pydantic import BaseModel - - -class Model(BaseModel): - _class_var: ClassVar[str] = 'class var value' - _private_attr: str = 'private attr value' - - class Config: - underscore_attrs_are_private = True - - -print(Model._class_var) -print(Model._private_attr) -print(Model()._private_attr) diff --git a/docs/examples/ruff.toml b/docs/examples/ruff.toml deleted file mode 100644 index fcfeca4088..0000000000 --- a/docs/examples/ruff.toml +++ /dev/null @@ -1,9 +0,0 @@ -line-length = 80 -ignore = ["E402", "F722"] -extend-select = ["Q"] -flake8-quotes = {inline-quotes = "single", multiline-quotes = "double"} - -[per-file-ignores] -# waiting for https://github.com/charliermarsh/ruff/issues/282 -"models_structural_pattern_matching.py" = ["E999"] -"schema_unenforced_constraints.py" = ["F811"] diff --git a/docs/examples/schema_ad_hoc.py b/docs/examples/schema_ad_hoc.py deleted file mode 100644 index 10862e162d..0000000000 --- a/docs/examples/schema_ad_hoc.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Literal, Union - -from typing_extensions import Annotated - -from pydantic import BaseModel, Field, schema_json_of - - -class Cat(BaseModel): - pet_type: Literal['cat'] - cat_name: str - - -class Dog(BaseModel): - pet_type: Literal['dog'] - dog_name: str - - -Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] - -print(schema_json_of(Pet, title='The Pet Schema', indent=2)) diff --git a/docs/examples/schema_annotated.py b/docs/examples/schema_annotated.py deleted file mode 100644 index 12f1ab5f12..0000000000 --- a/docs/examples/schema_annotated.py +++ /dev/null @@ -1,9 +0,0 @@ -from uuid import uuid4 - -from pydantic import BaseModel, Field -from typing_extensions import Annotated - - -class Foo(BaseModel): - id: Annotated[str, Field(default_factory=lambda: uuid4().hex)] - name: Annotated[str, Field(max_length=256)] = 'Bar' diff --git a/docs/examples/schema_custom.py b/docs/examples/schema_custom.py deleted file mode 100644 index 1341762767..0000000000 --- a/docs/examples/schema_custom.py +++ /dev/null @@ -1,17 +0,0 @@ -# output-json -import json -from pydantic import BaseModel -from pydantic.json_schema import schema - - -class Foo(BaseModel): - a: int - - -class Model(BaseModel): - a: Foo - - -# Default location for OpenAPI -top_level_schema = schema([Model], ref_prefix='#/components/schemas/') -print(json.dumps(top_level_schema, indent=2)) diff --git a/docs/examples/schema_extra_callable.py b/docs/examples/schema_extra_callable.py deleted file mode 100644 index bc5bff4500..0000000000 --- a/docs/examples/schema_extra_callable.py +++ /dev/null @@ -1,19 +0,0 @@ -# output-json -from typing import Dict, Any, Type -from pydantic import BaseModel - - -class Person(BaseModel): - name: str - age: int - - class Config: - # TODO: This is no longer valid in v2; - # update example to use __pydantic_modify_json_schema__ - @staticmethod - def schema_extra(schema: Dict[str, Any], model: Type['Person']) -> None: - for prop in schema.get('properties', {}).values(): - prop.pop('title', None) - - -print(Person.schema_json(indent=2)) diff --git a/docs/examples/schema_main.py b/docs/examples/schema_main.py deleted file mode 100644 index b6a482fae2..0000000000 --- a/docs/examples/schema_main.py +++ /dev/null @@ -1,38 +0,0 @@ -# output-json -from enum import Enum -from pydantic import BaseModel, Field - - -class FooBar(BaseModel): - count: int - size: float = None - - -class Gender(str, Enum): - male = 'male' - female = 'female' - other = 'other' - not_given = 'not_given' - - -class MainModel(BaseModel): - """ - This is the description of the main model - """ - - foo_bar: FooBar = Field(...) - gender: Gender = Field(None, alias='Gender') - snap: int = Field( - 42, - title='The Snap', - description='this is the value of snap', - gt=30, - lt=50, - ) - - class Config: - title = 'Main' - - -# this is equivalent to json.dumps(MainModel.model_json_schema(), indent=2): -print(MainModel.schema_json(indent=2)) diff --git a/docs/examples/schema_top_level.py b/docs/examples/schema_top_level.py deleted file mode 100644 index 5aa4c3ecfe..0000000000 --- a/docs/examples/schema_top_level.py +++ /dev/null @@ -1,20 +0,0 @@ -# output-json -import json -from pydantic import BaseModel -from pydantic.json_schema import schema - - -class Foo(BaseModel): - a: str = None - - -class Model(BaseModel): - b: Foo - - -class Bar(BaseModel): - c: int - - -top_level_schema = schema([Model, Bar], title='My Schema') -print(json.dumps(top_level_schema, indent=2)) diff --git a/docs/examples/schema_unenforced_constraints.py b/docs/examples/schema_unenforced_constraints.py deleted file mode 100644 index ca83d28d8d..0000000000 --- a/docs/examples/schema_unenforced_constraints.py +++ /dev/null @@ -1,30 +0,0 @@ -from pydantic import BaseModel, Field, PositiveInt - -try: - # this won't work since PositiveInt takes precedence over the - # constraints defined in Field meaning they're ignored - class Model(BaseModel): - foo: PositiveInt = Field(..., lt=10) -except ValueError as e: - print(e) - - -# but you can set the schema attribute directly: -# (Note: here exclusiveMaximum will not be enforce) -class ModelA(BaseModel): - foo: PositiveInt = Field(..., exclusiveMaximum=10) - - -print(ModelA.model_json_schema()) - - -# if you find yourself needing this, an alternative is to declare -# the constraints in Field (or you could use conint()) -# here both constraints will be enforced: -class ModelB(BaseModel): - # Here both constraints will be applied and the schema - # will be generated correctly - foo: int = Field(..., gt=0, lt=10) - - -print(ModelB.model_json_schema()) diff --git a/docs/examples/schema_with_example.py b/docs/examples/schema_with_example.py deleted file mode 100644 index 8076bb782d..0000000000 --- a/docs/examples/schema_with_example.py +++ /dev/null @@ -1,22 +0,0 @@ -# output-json -from pydantic import BaseModel - - -class Person(BaseModel): - name: str - age: int - - class Config: - # TODO: This is no longer valid in v2; - # update example to use __pydantic_modify_json_schema__ - schema_extra = { - 'examples': [ - { - 'name': 'John Doe', - 'age': 25, - } - ] - } - - -print(Person.schema_json(indent=2)) diff --git a/docs/examples/schema_with_field.py b/docs/examples/schema_with_field.py deleted file mode 100644 index aa0acdbe44..0000000000 --- a/docs/examples/schema_with_field.py +++ /dev/null @@ -1,33 +0,0 @@ -# output-json -from typing import Any, Callable, Dict, Generator, Optional - -from pydantic import BaseModel, Field -from pydantic.fields import ModelField - - -class RestrictedAlphabetStr(str): - @classmethod - def __get_validators__(cls) -> Generator[Callable, None, None]: - yield cls.validate - - @classmethod - def validate(cls, value: str, field: ModelField): - alphabet = field.field_info.extra['alphabet'] - if any(c not in alphabet for c in value): - raise ValueError(f'{value!r} is not restricted to {alphabet!r}') - return cls(value) - - @classmethod - def __pydantic_modify_json_schema__( - cls, field_schema: Dict[str, Any], field: Optional[ModelField] - ): - if field: - alphabet = field.field_info.extra['alphabet'] - field_schema['examples'] = [c * 3 for c in alphabet] - - -class MyModel(BaseModel): - value: RestrictedAlphabetStr = Field(alphabet='ABC') - - -print(MyModel.schema_json(indent=2)) diff --git a/docs/examples/types_arbitrary_allowed.py b/docs/examples/types_arbitrary_allowed.py deleted file mode 100644 index 14379b4959..0000000000 --- a/docs/examples/types_arbitrary_allowed.py +++ /dev/null @@ -1,37 +0,0 @@ -from pydantic import BaseModel, ValidationError - - -# This is not a pydantic model, it's an arbitrary class -class Pet: - def __init__(self, name: str): - self.name = name - - -class Model(BaseModel): - pet: Pet - owner: str - - class Config: - arbitrary_types_allowed = True - - -pet = Pet(name='Hedwig') -# A simple check of instance type is used to validate the data -model = Model(owner='Harry', pet=pet) -print(model) -print(model.pet) -print(model.pet.name) -print(type(model.pet)) -try: - # If the value is not an instance of the type, it's invalid - Model(owner='Harry', pet='Hedwig') -except ValidationError as e: - print(e) -# Nothing in the instance of the arbitrary type is checked -# Here name probably should have been a str, but it's not validated -pet2 = Pet(name=42) -model2 = Model(owner='Harry', pet=pet2) -print(model2) -print(model2.pet) -print(model2.pet.name) -print(type(model2.pet)) diff --git a/docs/examples/types_bare_type.py b/docs/examples/types_bare_type.py deleted file mode 100644 index aea9fef533..0000000000 --- a/docs/examples/types_bare_type.py +++ /dev/null @@ -1,20 +0,0 @@ -# dont-upgrade -from typing import Type - -from pydantic import BaseModel, ValidationError - - -class Foo: - pass - - -class LenientSimpleModel(BaseModel): - any_class_goes: Type - - -LenientSimpleModel(any_class_goes=int) -LenientSimpleModel(any_class_goes=Foo) -try: - LenientSimpleModel(any_class_goes=Foo()) -except ValidationError as e: - print(e) diff --git a/docs/examples/types_boolean.py b/docs/examples/types_boolean.py deleted file mode 100644 index 6c2d03a27c..0000000000 --- a/docs/examples/types_boolean.py +++ /dev/null @@ -1,13 +0,0 @@ -from pydantic import BaseModel, ValidationError - - -class BooleanModel(BaseModel): - bool_value: bool - - -print(BooleanModel(bool_value=False)) -print(BooleanModel(bool_value='False')) -try: - BooleanModel(bool_value=[]) -except ValidationError as e: - print(str(e)) diff --git a/docs/examples/types_bytesize.py b/docs/examples/types_bytesize.py deleted file mode 100644 index 55878a8908..0000000000 --- a/docs/examples/types_bytesize.py +++ /dev/null @@ -1,15 +0,0 @@ -from pydantic import BaseModel, ByteSize - - -class MyModel(BaseModel): - size: ByteSize - - -print(MyModel(size=52000).size) -print(MyModel(size='3000 KiB').size) - -m = MyModel(size='50 PB') -print(m.size.human_readable()) -print(m.size.human_readable(decimal=True)) - -print(m.size.to('TiB')) diff --git a/docs/examples/types_callable.py b/docs/examples/types_callable.py deleted file mode 100644 index 3772a5ffbc..0000000000 --- a/docs/examples/types_callable.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Callable -from pydantic import BaseModel - - -class Foo(BaseModel): - callback: Callable[[int], int] - - -m = Foo(callback=lambda x: x) -print(m) diff --git a/docs/examples/types_choices.py b/docs/examples/types_choices.py deleted file mode 100644 index 3e3477ba37..0000000000 --- a/docs/examples/types_choices.py +++ /dev/null @@ -1,26 +0,0 @@ -from enum import Enum, IntEnum - -from pydantic import BaseModel, ValidationError - - -class FruitEnum(str, Enum): - pear = 'pear' - banana = 'banana' - - -class ToolEnum(IntEnum): - spanner = 1 - wrench = 2 - - -class CookingModel(BaseModel): - fruit: FruitEnum = FruitEnum.pear - tool: ToolEnum = ToolEnum.spanner - - -print(CookingModel()) -print(CookingModel(tool=2, fruit='banana')) -try: - CookingModel(fruit='other') -except ValidationError as e: - print(e) diff --git a/docs/examples/types_color.py b/docs/examples/types_color.py deleted file mode 100644 index 705d7d5455..0000000000 --- a/docs/examples/types_color.py +++ /dev/null @@ -1,21 +0,0 @@ -from pydantic import BaseModel, ValidationError -from pydantic.color import Color - -c = Color('ff00ff') -print(c.as_named()) -print(c.as_hex()) -c2 = Color('green') -print(c2.as_rgb_tuple()) -print(c2.original()) -print(repr(Color('hsl(180, 100%, 50%)'))) - - -class Model(BaseModel): - color: Color - - -print(Model(color='purple')) -try: - Model(color='hello') -except ValidationError as e: - print(e) diff --git a/docs/examples/types_constrained.py b/docs/examples/types_constrained.py deleted file mode 100644 index fd4e7ca635..0000000000 --- a/docs/examples/types_constrained.py +++ /dev/null @@ -1,59 +0,0 @@ -from decimal import Decimal - -from pydantic import ( - BaseModel, - NegativeFloat, - NegativeInt, - PositiveFloat, - PositiveInt, - NonNegativeFloat, - NonNegativeInt, - NonPositiveFloat, - NonPositiveInt, - conbytes, - condecimal, - confloat, - conint, - conlist, - conset, - constr, - Field, -) - - -class Model(BaseModel): - upper_bytes: conbytes(to_upper=True) - lower_bytes: conbytes(to_lower=True) - short_bytes: conbytes(min_length=2, max_length=10) - strip_bytes: conbytes(strip_whitespace=True) - - upper_str: constr(to_upper=True) - lower_str: constr(to_lower=True) - short_str: constr(min_length=2, max_length=10) - regex_str: constr(regex=r'^apple (pie|tart|sandwich)$') - strip_str: constr(strip_whitespace=True) - - big_int: conint(gt=1000, lt=1024) - mod_int: conint(multiple_of=5) - pos_int: PositiveInt - neg_int: NegativeInt - non_neg_int: NonNegativeInt - non_pos_int: NonPositiveInt - - big_float: confloat(gt=1000, lt=1024) - unit_interval: confloat(ge=0, le=1) - mod_float: confloat(multiple_of=0.5) - pos_float: PositiveFloat - neg_float: NegativeFloat - non_neg_float: NonNegativeFloat - non_pos_float: NonPositiveFloat - - short_list: conlist(int, min_items=1, max_items=4) - short_set: conset(int, min_items=1, max_items=4) - - decimal_positive: condecimal(gt=0) - decimal_negative: condecimal(lt=0) - decimal_max_digits_and_places: condecimal(max_digits=2, decimal_places=2) - mod_decimal: condecimal(multiple_of=Decimal('0.25')) - - bigger_int: int = Field(..., gt=10000) diff --git a/docs/examples/types_custom_type.py b/docs/examples/types_custom_type.py deleted file mode 100644 index ed1abe6989..0000000000 --- a/docs/examples/types_custom_type.py +++ /dev/null @@ -1,68 +0,0 @@ -import re -from pydantic import BaseModel - -# https://en.wikipedia.org/wiki/Postcodes_in_the_United_Kingdom#Validation -post_code_regex = re.compile( - r'(?:' - r'([A-Z]{1,2}[0-9][A-Z0-9]?|ASCN|STHL|TDCU|BBND|[BFS]IQQ|PCRN|TKCA) ?' - r'([0-9][A-Z]{2})|' - r'(BFPO) ?([0-9]{1,4})|' - r'(KY[0-9]|MSR|VG|AI)[ -]?[0-9]{4}|' - r'([A-Z]{2}) ?([0-9]{2})|' - r'(GE) ?(CX)|' - r'(GIR) ?(0A{2})|' - r'(SAN) ?(TA1)' - r')' -) - - -class PostCode(str): - """ - Partial UK postcode validation. Note: this is just an example, and is not - intended for use in production; in particular this does NOT guarantee - a postcode exists, just that it has a valid format. - """ - - @classmethod - def __get_validators__(cls): - # one or more validators may be yielded which will be called in the - # order to validate the input, each validator will receive as an input - # the value returned from the previous validator - yield cls.validate - - @classmethod - def __pydantic_modify_json_schema__(cls, field_schema): - # __pydantic_modify_json_schema__ should mutate the dict it receives - # in place, the returned value will be ignored - field_schema.update( - # simplified regex here for brevity, see the wikipedia link above - pattern='^[A-Z]{1,2}[0-9][A-Z0-9]? ?[0-9][A-Z]{2}$', - # some example postcodes - examples=['SP11 9DG', 'w1j7bu'], - ) - - @classmethod - def validate(cls, v): - if not isinstance(v, str): - raise TypeError('string required') - m = post_code_regex.fullmatch(v.upper()) - if not m: - raise ValueError('invalid postcode format') - # you could also return a string here which would mean model.post_code - # would be a string, pydantic won't care but you could end up with some - # confusion since the value's type won't match the type annotation - # exactly - return cls(f'{m.group(1)} {m.group(2)}') - - def __repr__(self): - return f'PostCode({super().__repr__()})' - - -class Model(BaseModel): - post_code: PostCode - - -model = Model(post_code='sw8 5el') -print(model) -print(model.post_code) -print(Model.model_json_schema()) diff --git a/docs/examples/types_dt.py b/docs/examples/types_dt.py deleted file mode 100644 index c168ff5595..0000000000 --- a/docs/examples/types_dt.py +++ /dev/null @@ -1,19 +0,0 @@ -from datetime import date, datetime, time, timedelta -from pydantic import BaseModel - - -class Model(BaseModel): - d: date = None - dt: datetime = None - t: time = None - td: timedelta = None - - -m = Model( - d=1966280412345.6789, - dt='2032-04-23T10:20:30.400+02:30', - t=time(4, 8, 16), - td='P3DT12H30M5S', -) - -print(m.model_dump()) diff --git a/docs/examples/types_generics.py b/docs/examples/types_generics.py deleted file mode 100644 index 50cfafd163..0000000000 --- a/docs/examples/types_generics.py +++ /dev/null @@ -1,83 +0,0 @@ -from pydantic import BaseModel, ValidationError -from pydantic.fields import ModelField -from typing import TypeVar, Generic - -AgedType = TypeVar('AgedType') -QualityType = TypeVar('QualityType') - - -# This is not a pydantic model, it's an arbitrary generic class -class TastingModel(Generic[AgedType, QualityType]): - def __init__(self, name: str, aged: AgedType, quality: QualityType): - self.name = name - self.aged = aged - self.quality = quality - - @classmethod - def __get_validators__(cls): - yield cls.validate - - @classmethod - # You don't need to add the "ModelField", but it will help your - # editor give you completion and catch errors - def validate(cls, v, field: ModelField): - if not isinstance(v, cls): - # The value is not even a TastingModel - raise TypeError('Invalid value') - if not field.sub_fields: - # Generic parameters were not provided so we don't try to validate - # them and just return the value as is - return v - aged_f = field.sub_fields[0] - quality_f = field.sub_fields[1] - errors = [] - # Here we don't need the validated value, but we want the errors - valid_value, error = aged_f.validate(v.aged, {}, loc='aged') - if error: - errors.append(error) - # Here we don't need the validated value, but we want the errors - valid_value, error = quality_f.validate(v.quality, {}, loc='quality') - if error: - errors.append(error) - if errors: - raise ValidationError(errors, cls) - # Validation passed without errors, return the same instance received - return v - - -class Model(BaseModel): - # for wine, "aged" is an int with years, "quality" is a float - wine: TastingModel[int, float] - # for cheese, "aged" is a bool, "quality" is a str - cheese: TastingModel[bool, str] - # for thing, "aged" is a Any, "quality" is Any - thing: TastingModel - - -model = Model( - # This wine was aged for 20 years and has a quality of 85.6 - wine=TastingModel(name='Cabernet Sauvignon', aged=20, quality=85.6), - # This cheese is aged (is mature) and has "Good" quality - cheese=TastingModel(name='Gouda', aged=True, quality='Good'), - # This Python thing has aged "Not much" and has a quality "Awesome" - thing=TastingModel(name='Python', aged='Not much', quality='Awesome'), -) -print(model) -print(model.wine.aged) -print(model.wine.quality) -print(model.cheese.aged) -print(model.cheese.quality) -print(model.thing.aged) -try: - # If the values of the sub-types are invalid, we get an error - Model( - # For wine, aged should be an int with the years, and quality a float - wine=TastingModel(name='Merlot', aged=True, quality='Kinda good'), - # For cheese, aged should be a bool, and quality a str - cheese=TastingModel(name='Gouda', aged='yeah', quality=5), - # For thing, no type parameters are declared, and we skipped validation - # in those cases in the Assessment.validate() function - thing=TastingModel(name='Python', aged='Not much', quality='Awesome'), - ) -except ValidationError as e: - print(e) diff --git a/docs/examples/types_import_string_serialization.py b/docs/examples/types_import_string_serialization.py deleted file mode 100644 index 4153441de1..0000000000 --- a/docs/examples/types_import_string_serialization.py +++ /dev/null @@ -1,42 +0,0 @@ -from pydantic import BaseModel, ImportString -from types import BuiltinFunctionType - - -# The following class will not successfully serialize to JSON -# Since "obj" is evaluated to an object, not a pydantic `ImportString` -class WithCustomEncodersBad(BaseModel): - obj: ImportString - - class Config: - json_encoders = { - ImportString: lambda x: str(x) - } - - -# Create an instance -m = WithCustomEncodersBad(obj='math.cos') - -try: - m.json() -except TypeError as e: - print(e) - -# Let's do some sanity checks to verify that m.obj is not an "ImportString" -print(isinstance(m.obj, ImportString)) -print(isinstance(m.obj, BuiltinFunctionType)) - - -# So now that we know that after an ImportString is evaluated by Pydantic -# it results in its underlying object, we can configure our json encoder -# to account for those specific types -class WithCustomEncodersGood(BaseModel): - obj: ImportString - - class Config: - json_encoders = { - BuiltinFunctionType: lambda x: str(x) - } - - -m = WithCustomEncodersGood(obj='math.cos') -print(m.json()) diff --git a/docs/examples/types_import_string_usage.py b/docs/examples/types_import_string_usage.py deleted file mode 100644 index e8753d8935..0000000000 --- a/docs/examples/types_import_string_usage.py +++ /dev/null @@ -1,40 +0,0 @@ -from pydantic import BaseModel, ImportString, ValidationError - - -class ImportThings(BaseModel): - obj: ImportString - - -# A string value will cause an automatic import -my_cos = ImportThings(obj='math.cos') - -# You can use the imported function as you would expect -cos_of_0 = my_cos.obj(0) -assert cos_of_0 == 1 - - -# A string whose value cannot be imported will raise an error -try: - ImportThings(obj='foo.bar') -except ValidationError as e: - print(e) - - -# An object defined in the current namespace can indeed be imported, -# though you should probably avoid doing this (since the ordering of declaration -# can have an impact on behavior). -class Foo: - bar = 1 - - -# This now works -my_foo = ImportThings(obj=Foo) -# So does this -my_foo_2 = ImportThings(obj='__main__.Foo') - - -# Actual python objects can be assigned as well -from math import cos # noqa: E402 -my_cos = ImportThings(obj=cos) -my_cos_2 = ImportThings(obj='math.cos') -assert my_cos == my_cos_2 diff --git a/docs/examples/types_infinite_generator.py b/docs/examples/types_infinite_generator.py deleted file mode 100644 index 22aa6262c3..0000000000 --- a/docs/examples/types_infinite_generator.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Iterable -from pydantic import BaseModel - - -class Model(BaseModel): - infinite: Iterable[int] - - -def infinite_ints(): - i = 0 - while True: - yield i - i += 1 - - -m = Model(infinite=infinite_ints()) -print(m) - -for i in m.infinite: - print(i) - if i == 10: - break diff --git a/docs/examples/types_infinite_generator_validate_first.py b/docs/examples/types_infinite_generator_validate_first.py deleted file mode 100644 index a0bd5633d0..0000000000 --- a/docs/examples/types_infinite_generator_validate_first.py +++ /dev/null @@ -1,46 +0,0 @@ -import itertools -from typing import Iterable -from pydantic import BaseModel, validator, ValidationError -from pydantic.fields import ModelField - - -class Model(BaseModel): - infinite: Iterable[int] - - @validator('infinite') - # You don't need to add the "ModelField", but it will help your - # editor give you completion and catch errors - def infinite_first_int(cls, iterable, field: ModelField): - first_value = next(iterable) - if field.sub_fields: - # The Iterable had a parameter type, in this case it's int - # We use it to validate the first value - sub_field = field.sub_fields[0] - v, error = sub_field.validate(first_value, {}, loc='first_value') - if error: - raise ValidationError([error], cls) - # This creates a new generator that returns the first value and then - # the rest of the values from the (already started) iterable - return itertools.chain([first_value], iterable) - - -def infinite_ints(): - i = 0 - while True: - yield i - i += 1 - - -m = Model(infinite=infinite_ints()) -print(m) - - -def infinite_strs(): - while True: - yield from 'allthesingleladies' - - -try: - Model(infinite=infinite_strs()) -except ValidationError as e: - print(e) diff --git a/docs/examples/types_iterables.py b/docs/examples/types_iterables.py deleted file mode 100644 index 3e5a3caf2a..0000000000 --- a/docs/examples/types_iterables.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import ( - Deque, Dict, FrozenSet, List, Optional, Sequence, Set, Tuple, Union -) - -from pydantic import BaseModel - - -class Model(BaseModel): - simple_list: list = None - list_of_ints: List[int] = None - - simple_tuple: tuple = None - tuple_of_different_types: Tuple[int, float, str, bool] = None - - simple_dict: dict = None - dict_str_float: Dict[str, float] = None - - simple_set: set = None - set_bytes: Set[bytes] = None - frozen_set: FrozenSet[int] = None - - str_or_bytes: Union[str, bytes] = None - none_or_str: Optional[str] = None - - sequence_of_ints: Sequence[int] = None - - compound: Dict[Union[str, bytes], List[Set[int]]] = None - - deque: Deque[int] = None - - -print(Model(simple_list=['1', '2', '3']).simple_list) -print(Model(list_of_ints=['1', '2', '3']).list_of_ints) - -print(Model(simple_dict={'a': 1, b'b': 2}).simple_dict) -print(Model(dict_str_float={'a': 1, b'b': 2}).dict_str_float) - -print(Model(simple_tuple=[1, 2, 3, 4]).simple_tuple) -print(Model(tuple_of_different_types=[4, 3, 2, 1]).tuple_of_different_types) - -print(Model(sequence_of_ints=[1, 2, 3, 4]).sequence_of_ints) -print(Model(sequence_of_ints=(1, 2, 3, 4)).sequence_of_ints) - -print(Model(deque=[1, 2, 3]).deque) diff --git a/docs/examples/types_json_type.py b/docs/examples/types_json_type.py deleted file mode 100644 index 7f4826a94f..0000000000 --- a/docs/examples/types_json_type.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any, List - -from pydantic import BaseModel, Json, ValidationError - - -class AnyJsonModel(BaseModel): - json_obj: Json[Any] - - -class ConstrainedJsonModel(BaseModel): - json_obj: Json[List[int]] - - -print(AnyJsonModel(json_obj='{"b": 1}')) -print(ConstrainedJsonModel(json_obj='[1, 2, 3]')) -try: - ConstrainedJsonModel(json_obj=12) -except ValidationError as e: - print(e) - -try: - ConstrainedJsonModel(json_obj='[a, b]') -except ValidationError as e: - print(e) - -try: - ConstrainedJsonModel(json_obj='["a", "b"]') -except ValidationError as e: - print(e) diff --git a/docs/examples/types_literal1.py b/docs/examples/types_literal1.py deleted file mode 100644 index 1cc952c5d5..0000000000 --- a/docs/examples/types_literal1.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Literal - -from pydantic import BaseModel, ValidationError - - -class Pie(BaseModel): - flavor: Literal['apple', 'pumpkin'] - - -Pie(flavor='apple') -Pie(flavor='pumpkin') -try: - Pie(flavor='cherry') -except ValidationError as e: - print(str(e)) diff --git a/docs/examples/types_literal2.py b/docs/examples/types_literal2.py deleted file mode 100644 index 6a44c68ae2..0000000000 --- a/docs/examples/types_literal2.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import ClassVar, List, Union - -from typing import Literal - -from pydantic import BaseModel, ValidationError - - -class Cake(BaseModel): - kind: Literal['cake'] - required_utensils: ClassVar[List[str]] = ['fork', 'knife'] - - -class IceCream(BaseModel): - kind: Literal['icecream'] - required_utensils: ClassVar[List[str]] = ['spoon'] - - -class Meal(BaseModel): - dessert: Union[Cake, IceCream] - - -print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) -print(type(Meal(dessert={'kind': 'icecream'}).dessert).__name__) -try: - Meal(dessert={'kind': 'pie'}) -except ValidationError as e: - print(str(e)) diff --git a/docs/examples/types_literal3.py b/docs/examples/types_literal3.py deleted file mode 100644 index f783c3900d..0000000000 --- a/docs/examples/types_literal3.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Optional, Union - -from typing import Literal - -from pydantic import BaseModel - - -class Dessert(BaseModel): - kind: str - - -class Pie(Dessert): - kind: Literal['pie'] - flavor: Optional[str] - - -class ApplePie(Pie): - flavor: Literal['apple'] - - -class PumpkinPie(Pie): - flavor: Literal['pumpkin'] - - -class Meal(BaseModel): - dessert: Union[ApplePie, PumpkinPie, Pie, Dessert] - - -print(type(Meal(dessert={'kind': 'pie', 'flavor': 'apple'}).dessert).__name__) -print(type(Meal(dessert={'kind': 'pie', 'flavor': 'pumpkin'}).dessert).__name__) -print(type(Meal(dessert={'kind': 'pie'}).dessert).__name__) -print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) diff --git a/docs/examples/types_payment_card_number.py b/docs/examples/types_payment_card_number.py deleted file mode 100644 index 22d93febfb..0000000000 --- a/docs/examples/types_payment_card_number.py +++ /dev/null @@ -1,30 +0,0 @@ -from datetime import date - -from pydantic import BaseModel -from pydantic.types import PaymentCardBrand, PaymentCardNumber, constr - - -class Card(BaseModel): - name: constr(strip_whitespace=True, min_length=1) - number: PaymentCardNumber - exp: date - - @property - def brand(self) -> PaymentCardBrand: - return self.number.brand - - @property - def expired(self) -> bool: - return self.exp < date.today() - - -card = Card( - name='Georg Wilhelm Friedrich Hegel', - number='4000000000000002', - exp=date(2023, 9, 30), -) - -assert card.number.brand == PaymentCardBrand.visa -assert card.number.bin == '400000' -assert card.number.last4 == '0002' -assert card.number.masked == '400000******0002' diff --git a/docs/examples/types_secret_types.py b/docs/examples/types_secret_types.py deleted file mode 100644 index 826f9e080a..0000000000 --- a/docs/examples/types_secret_types.py +++ /dev/null @@ -1,50 +0,0 @@ -from pydantic import BaseModel, SecretStr, SecretBytes, ValidationError - - -class SimpleModel(BaseModel): - password: SecretStr - password_bytes: SecretBytes - - -sm = SimpleModel(password='IAmSensitive', password_bytes=b'IAmSensitiveBytes') - -# Standard access methods will not display the secret -print(sm) -print(sm.password) -print(sm.model_dump()) -print(sm.model_dump_json()) - -# Use get_secret_value method to see the secret's content. -print(sm.password.get_secret_value()) -print(sm.password_bytes.get_secret_value()) - -try: - SimpleModel(password=[1, 2, 3], password_bytes=[1, 2, 3]) -except ValidationError as e: - print(e) - - -# If you want the secret to be dumped as plain-text using the json method, -# you can use json_encoders in the Config class. -class SimpleModelDumpable(BaseModel): - password: SecretStr - password_bytes: SecretBytes - - class Config: - json_encoders = { - SecretStr: lambda v: v.get_secret_value() if v else None, - SecretBytes: lambda v: v.get_secret_value() if v else None, - } - - -sm2 = SimpleModelDumpable( - password='IAmSensitive', password_bytes=b'IAmSensitiveBytes' -) - -# Standard access methods will not display the secret -print(sm2) -print(sm2.password) -print(sm2.model_dump()) - -# But the json method will -print(sm2.model_dump_json()) diff --git a/docs/examples/types_strict.py b/docs/examples/types_strict.py deleted file mode 100644 index 6612dc5515..0000000000 --- a/docs/examples/types_strict.py +++ /dev/null @@ -1,53 +0,0 @@ -from pydantic import ( - BaseModel, - StrictBytes, - StrictBool, - StrictInt, - ValidationError, - confloat, -) - - -class StrictBytesModel(BaseModel): - strict_bytes: StrictBytes - - -try: - StrictBytesModel(strict_bytes='hello world') -except ValidationError as e: - print(e) - - -class StrictIntModel(BaseModel): - strict_int: StrictInt - - -try: - StrictIntModel(strict_int=3.14159) -except ValidationError as e: - print(e) - - -class ConstrainedFloatModel(BaseModel): - constrained_float: confloat(strict=True, ge=0.0) - - -try: - ConstrainedFloatModel(constrained_float=3) -except ValidationError as e: - print(e) - -try: - ConstrainedFloatModel(constrained_float=-1.23) -except ValidationError as e: - print(e) - - -class StrictBoolModel(BaseModel): - strict_bool: StrictBool - - -try: - StrictBoolModel(strict_bool='False') -except ValidationError as e: - print(str(e)) diff --git a/docs/examples/types_type.py b/docs/examples/types_type.py deleted file mode 100644 index 933d7ee945..0000000000 --- a/docs/examples/types_type.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Type - -from pydantic import BaseModel -from pydantic import ValidationError - - -class Foo: - pass - - -class Bar(Foo): - pass - - -class Other: - pass - - -class SimpleModel(BaseModel): - just_subclasses: Type[Foo] - - -SimpleModel(just_subclasses=Foo) -SimpleModel(just_subclasses=Bar) -try: - SimpleModel(just_subclasses=Other) -except ValidationError as e: - print(e) diff --git a/docs/examples/types_typevar.py b/docs/examples/types_typevar.py deleted file mode 100644 index 85dc563240..0000000000 --- a/docs/examples/types_typevar.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import TypeVar -from pydantic import BaseModel - -Foobar = TypeVar('Foobar') -BoundFloat = TypeVar('BoundFloat', bound=float) -IntStr = TypeVar('IntStr', int, str) - - -class Model(BaseModel): - a: Foobar # equivalent of ": Any" - b: BoundFloat # equivalent of ": float" - c: IntStr # equivalent of ": Union[int, str]" - - -print(Model(a=[1], b=4.2, c='x')) - -# a may be None and is therefore optional -print(Model(b=1, c=1)) diff --git a/docs/examples/types_undefined_warning.py b/docs/examples/types_undefined_warning.py deleted file mode 100644 index a591cc192d..0000000000 --- a/docs/examples/types_undefined_warning.py +++ /dev/null @@ -1,45 +0,0 @@ -from __future__ import annotations - -from pydantic import BaseModel - - -# This example shows how Book and Person types reference each other. -# We will demonstrate how to suppress the undefined types warning -# when define such models. - - -class Book(BaseModel): - title: str - author: Person # note the `Person` type is not yet defined - - # Suppress undefined types warning so we can continue defining our models. - class Config: - undefined_types_warning = False - - -class Person(BaseModel): - name: str - books_read: list[Book] | None = None - - -# Now, we can rebuild the `Book` model, since the `Person` model is now defined. -# Note: there's no need to call `model_rebuild()` on `Person`, -# it's already complete. -Book.model_rebuild() - -# Let's create some instances of our models, to demonstrate that they work. -python_crash_course = Book( - title='Python Crash Course', - author=Person(name='Eric Matthes'), -) -jane_doe = Person(name='Jane Doe', books_read=[python_crash_course]) - -assert jane_doe.dict(exclude_unset=True) == { - 'name': 'Jane Doe', - 'books_read': [ - { - 'title': 'Python Crash Course', - 'author': {'name': 'Eric Matthes'}, - }, - ], -} diff --git a/docs/examples/types_union_correct.py b/docs/examples/types_union_correct.py deleted file mode 100644 index 9a4283f758..0000000000 --- a/docs/examples/types_union_correct.py +++ /dev/null @@ -1,15 +0,0 @@ -from uuid import UUID -from typing import Union -from pydantic import BaseModel - - -class User(BaseModel): - id: Union[UUID, int, str] - name: str - - -user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') -user_03 = User(id=user_03_uuid, name='John Doe') -print(user_03) -print(user_03.id) -print(user_03_uuid.int) diff --git a/docs/examples/types_union_discriminated.py b/docs/examples/types_union_discriminated.py deleted file mode 100644 index bae8476562..0000000000 --- a/docs/examples/types_union_discriminated.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Literal, Union - -from pydantic import BaseModel, Field, ValidationError - - -class Cat(BaseModel): - pet_type: Literal['cat'] - meows: int - - -class Dog(BaseModel): - pet_type: Literal['dog'] - barks: float - - -class Lizard(BaseModel): - pet_type: Literal['reptile', 'lizard'] - scales: bool - - -class Model(BaseModel): - pet: Union[Cat, Dog, Lizard] = Field(..., discriminator='pet_type') - n: int - - -print(Model(pet={'pet_type': 'dog', 'barks': 3.14}, n=1)) -try: - Model(pet={'pet_type': 'dog'}, n=1) -except ValidationError as e: - print(e) diff --git a/docs/examples/types_union_discriminated_nested.py b/docs/examples/types_union_discriminated_nested.py deleted file mode 100644 index df258b28b2..0000000000 --- a/docs/examples/types_union_discriminated_nested.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Literal, Union - -from typing_extensions import Annotated - -from pydantic import BaseModel, Field, ValidationError - - -class BlackCat(BaseModel): - pet_type: Literal['cat'] - color: Literal['black'] - black_name: str - - -class WhiteCat(BaseModel): - pet_type: Literal['cat'] - color: Literal['white'] - white_name: str - - -# Can also be written with a custom root type -# -# class Cat(BaseModel): -# __root__: Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] - -Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] - - -class Dog(BaseModel): - pet_type: Literal['dog'] - name: str - - -Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] - - -class Model(BaseModel): - pet: Pet - n: int - - -m = Model(pet={'pet_type': 'cat', 'color': 'black', 'black_name': 'felix'}, n=1) -print(m) -try: - Model(pet={'pet_type': 'cat', 'color': 'red'}, n='1') -except ValidationError as e: - print(e) -try: - Model(pet={'pet_type': 'cat', 'color': 'black'}, n='1') -except ValidationError as e: - print(e) diff --git a/docs/examples/types_union_incorrect.py b/docs/examples/types_union_incorrect.py deleted file mode 100644 index f493f78e44..0000000000 --- a/docs/examples/types_union_incorrect.py +++ /dev/null @@ -1,21 +0,0 @@ -from uuid import UUID -from typing import Union -from pydantic import BaseModel - - -class User(BaseModel): - id: Union[int, str, UUID] - name: str - - -user_01 = User(id=123, name='John Doe') -print(user_01) -print(user_01.id) -user_02 = User(id='1234', name='John Doe') -print(user_02) -print(user_02.id) -user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') -user_03 = User(id=user_03_uuid, name='John Doe') -print(user_03) -print(user_03.id) -print(user_03_uuid.int) diff --git a/docs/examples/types_url_properties.py b/docs/examples/types_url_properties.py deleted file mode 100644 index f2885acc7f..0000000000 --- a/docs/examples/types_url_properties.py +++ /dev/null @@ -1,33 +0,0 @@ -from pydantic import BaseModel, HttpUrl, PostgresDsn, ValidationError, validator - - -class MyModel(BaseModel): - url: HttpUrl - - -m = MyModel(url='http://www.example.com') - -# the repr() method for a url will display all properties of the url -print(repr(m.url)) -print(m.url.scheme) -print(m.url.host) -print(m.url.host_type) -print(m.url.port) - - -class MyDatabaseModel(BaseModel): - db: PostgresDsn - - @validator('db') - def check_db_name(cls, v): - assert v.path and len(v.path) > 1, 'database must be provided' - return v - - -m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar') -print(m.db) - -try: - MyDatabaseModel(db='postgres://user:pass@localhost:5432') -except ValidationError as e: - print(e) diff --git a/docs/examples/types_url_punycode.py b/docs/examples/types_url_punycode.py deleted file mode 100644 index 31dd85fdd3..0000000000 --- a/docs/examples/types_url_punycode.py +++ /dev/null @@ -1,16 +0,0 @@ -from pydantic import BaseModel, HttpUrl - - -class MyModel(BaseModel): - url: HttpUrl - - -m1 = MyModel(url='http://puny£code.com') -print(m1.url) -print(m1.url.host_type) -m2 = MyModel(url='https://www.аррӏе.com/') -print(m2.url) -print(m2.url.host_type) -m3 = MyModel(url='https://www.example.珠宝/') -print(m3.url) -print(m3.url.host_type) diff --git a/docs/examples/types_urls.py b/docs/examples/types_urls.py deleted file mode 100644 index fb49280ff9..0000000000 --- a/docs/examples/types_urls.py +++ /dev/null @@ -1,19 +0,0 @@ -from pydantic import BaseModel, HttpUrl, ValidationError - - -class MyModel(BaseModel): - url: HttpUrl - - -m = MyModel(url='http://www.example.com') -print(m.url) - -try: - MyModel(url='ftp://invalid.url') -except ValidationError as e: - print(e) - -try: - MyModel(url='not a url') -except ValidationError as e: - print(e) diff --git a/docs/examples/validation_decorator_async.py b/docs/examples/validation_decorator_async.py deleted file mode 100644 index 009043b34d..0000000000 --- a/docs/examples/validation_decorator_async.py +++ /dev/null @@ -1,32 +0,0 @@ -class Connection: - async def execute(self, sql, *args): - return 'testing@example.com' - - -conn = Connection() -# ignore-above -import asyncio -from pydantic import PositiveInt, ValidationError, validate_arguments - - -@validate_arguments -async def get_user_email(user_id: PositiveInt): - # `conn` is some fictional connection to a database - email = await conn.execute('select email from users where id=$1', user_id) - if email is None: - raise RuntimeError('user not found') - else: - return email - - -async def main(): - email = await get_user_email(123) - print(email) - try: - await get_user_email(-4) - except ValidationError as exc: - print(exc.errors()) - - -asyncio.run(main()) -# requires: `conn.execute()` that will return `'testing@example.com'` diff --git a/docs/examples/validation_decorator_config.py b/docs/examples/validation_decorator_config.py deleted file mode 100644 index b25c7f9567..0000000000 --- a/docs/examples/validation_decorator_config.py +++ /dev/null @@ -1,26 +0,0 @@ -from pydantic import ValidationError, validate_arguments - - -class Foobar: - def __init__(self, v: str): - self.v = v - - def __add__(self, other: 'Foobar') -> str: - return f'{self} + {other}' - - def __str__(self) -> str: - return f'Foobar({self.v})' - - -@validate_arguments(config=dict(arbitrary_types_allowed=True)) -def add_foobars(a: Foobar, b: Foobar): - return a + b - - -c = add_foobars(Foobar('a'), Foobar('b')) -print(c) - -try: - add_foobars(1, 2) -except ValidationError as e: - print(e) diff --git a/docs/examples/validation_decorator_field.py b/docs/examples/validation_decorator_field.py deleted file mode 100644 index 0d246b655c..0000000000 --- a/docs/examples/validation_decorator_field.py +++ /dev/null @@ -1,22 +0,0 @@ -from datetime import datetime -from pydantic import validate_arguments, Field, ValidationError -from typing_extensions import Annotated - - -@validate_arguments -def how_many(num: Annotated[int, Field(gt=10)]): - return num - - -try: - how_many(1) -except ValidationError as e: - print(e) - - -@validate_arguments -def when(dt: datetime = Field(default_factory=datetime.now)): - return dt - - -print(type(when())) diff --git a/docs/examples/validation_decorator_field_alias.py b/docs/examples/validation_decorator_field_alias.py deleted file mode 100644 index 9cbae0c125..0000000000 --- a/docs/examples/validation_decorator_field_alias.py +++ /dev/null @@ -1,10 +0,0 @@ -from pydantic import Field, validate_arguments -from typing_extensions import Annotated - - -@validate_arguments -def how_many(num: Annotated[int, Field(gt=10, alias='number')]): - return num - - -how_many(number=42) diff --git a/docs/examples/validation_decorator_main.py b/docs/examples/validation_decorator_main.py deleted file mode 100644 index b260a43855..0000000000 --- a/docs/examples/validation_decorator_main.py +++ /dev/null @@ -1,19 +0,0 @@ -from pydantic import validate_arguments, ValidationError - - -@validate_arguments -def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes: - b = s.encode() - return separator.join(b for _ in range(count)) - - -a = repeat('hello', 3) -print(a) - -b = repeat('x', '4', separator=' ') -print(b) - -try: - c = repeat('hello', 'wrong') -except ValidationError as exc: - print(exc) diff --git a/docs/examples/validation_decorator_parameter_types.py b/docs/examples/validation_decorator_parameter_types.py deleted file mode 100644 index 9d0b59148c..0000000000 --- a/docs/examples/validation_decorator_parameter_types.py +++ /dev/null @@ -1,68 +0,0 @@ -# requires python3.8 -from pydantic import validate_arguments - - -@validate_arguments -def pos_or_kw(a: int, b: int = 2) -> str: - return f'a={a} b={b}' - - -print(pos_or_kw(1)) -print(pos_or_kw(a=1)) -print(pos_or_kw(1, 3)) -print(pos_or_kw(a=1, b=3)) - - -@validate_arguments -def kw_only(*, a: int, b: int = 2) -> str: - return f'a={a} b={b}' - - -print(kw_only(a=1)) -print(kw_only(a=1, b=3)) - - -@validate_arguments -def pos_only(a: int, b: int = 2, /) -> str: # python 3.8 only - return f'a={a} b={b}' - - -print(pos_only(1)) -print(pos_only(1, 2)) - - -@validate_arguments -def var_args(*args: int) -> str: - return str(args) - - -print(var_args(1)) -print(var_args(1, 2)) -print(var_args(1, 2, 3)) - - -@validate_arguments -def var_kwargs(**kwargs: int) -> str: - return str(kwargs) - - -print(var_kwargs(a=1)) -print(var_kwargs(a=1, b=2)) - - -@validate_arguments -def armageddon( - a: int, - /, # python 3.8 only - b: int, - c: int = None, - *d: int, - e: int, - f: int = None, - **g: int, -) -> str: - return f'a={a} b={b} c={c} d={d} e={e} f={f} g={g}' - - -print(armageddon(1, 2, e=3)) -print(armageddon(1, 2, 3, 4, 5, 6, e=8, f=9, g=10, spam=11)) diff --git a/docs/examples/validation_decorator_raw_function.py b/docs/examples/validation_decorator_raw_function.py deleted file mode 100644 index 8e76fd40f0..0000000000 --- a/docs/examples/validation_decorator_raw_function.py +++ /dev/null @@ -1,14 +0,0 @@ -from pydantic import validate_arguments - - -@validate_arguments -def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes: - b = s.encode() - return separator.join(b for _ in range(count)) - - -a = repeat('hello', 3) -print(a) - -b = repeat.raw_function('good bye', 2, separator=b', ') -print(b) diff --git a/docs/examples/validation_decorator_types.py b/docs/examples/validation_decorator_types.py deleted file mode 100644 index 2b4fad6cd7..0000000000 --- a/docs/examples/validation_decorator_types.py +++ /dev/null @@ -1,21 +0,0 @@ -import os -from pathlib import Path -from typing import Pattern, Optional - -from pydantic import validate_arguments, DirectoryPath - - -@validate_arguments -def find_file(path: DirectoryPath, regex: Pattern, max=None) -> Optional[Path]: - for i, f in enumerate(path.glob('**/*')): - if max and i > max: - return - if f.is_file() and regex.fullmatch(str(f.relative_to(path))): - return f - - -# note: this_dir is a string here -this_dir = os.path.dirname(__file__) - -print(find_file(this_dir, '^validation.*')) -print(find_file(this_dir, '^foobar.*', max=3)) diff --git a/docs/examples/validation_decorator_validate.py b/docs/examples/validation_decorator_validate.py deleted file mode 100644 index 849b8af0d2..0000000000 --- a/docs/examples/validation_decorator_validate.py +++ /dev/null @@ -1,17 +0,0 @@ -from pydantic import validate_arguments, ValidationError - - -@validate_arguments -def slow_sum(a: int, b: int) -> int: - print(f'Called with a={a}, b={b}') - return a + b - - -slow_sum(1, 1) - -slow_sum.validate(2, 2) - -try: - slow_sum.validate(1, 'b') -except ValidationError as exc: - print(exc) diff --git a/docs/examples/validators_allow_reuse.py b/docs/examples/validators_allow_reuse.py deleted file mode 100644 index c6f52f30b1..0000000000 --- a/docs/examples/validators_allow_reuse.py +++ /dev/null @@ -1,25 +0,0 @@ -from pydantic import BaseModel, validator - - -def normalize(name: str) -> str: - return ' '.join((word.capitalize()) for word in name.split(' ')) - - -class Producer(BaseModel): - name: str - - # validators - _normalize_name = validator('name', allow_reuse=True)(normalize) - - -class Consumer(BaseModel): - name: str - - # validators - _normalize_name = validator('name', allow_reuse=True)(normalize) - - -jane_doe = Producer(name='JaNe DOE') -john_doe = Consumer(name='joHN dOe') -assert jane_doe.name == 'Jane Doe' -assert john_doe.name == 'John Doe' diff --git a/docs/examples/validators_always.py b/docs/examples/validators_always.py deleted file mode 100644 index f829c684b8..0000000000 --- a/docs/examples/validators_always.py +++ /dev/null @@ -1,15 +0,0 @@ -from datetime import datetime - -from pydantic import BaseModel, validator - - -class DemoModel(BaseModel): - ts: datetime = None - - @validator('ts', pre=True, always=True) - def set_ts_now(cls, v): - return v or datetime.now() - - -print(DemoModel()) -print(DemoModel(ts='2017-11-08T14:00')) diff --git a/docs/examples/validators_dataclass.py b/docs/examples/validators_dataclass.py deleted file mode 100644 index 11db5fca3c..0000000000 --- a/docs/examples/validators_dataclass.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime - -from pydantic import validator -from pydantic.dataclasses import dataclass - - -@dataclass -class DemoDataclass: - ts: datetime = None - - @validator('ts', pre=True, always=True) - def set_ts_now(cls, v): - return v or datetime.now() - - -print(DemoDataclass()) -print(DemoDataclass(ts='2017-11-08T14:00')) diff --git a/docs/examples/validators_pre_item.py b/docs/examples/validators_pre_item.py deleted file mode 100644 index 255712de8a..0000000000 --- a/docs/examples/validators_pre_item.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import List -from pydantic import BaseModel, ValidationError, validator - - -class DemoModel(BaseModel): - square_numbers: List[int] = [] - cube_numbers: List[int] = [] - - # '*' is the same as 'cube_numbers', 'square_numbers' here: - @validator('*', pre=True) - def split_str(cls, v): - if isinstance(v, str): - return v.split('|') - return v - - @validator('cube_numbers', 'square_numbers') - def check_sum(cls, v): - if sum(v) > 42: - raise ValueError('sum of numbers greater than 42') - return v - - @validator('square_numbers', each_item=True) - def check_squares(cls, v): - assert v ** 0.5 % 1 == 0, f'{v} is not a square number' - return v - - @validator('cube_numbers', each_item=True) - def check_cubes(cls, v): - # 64 ** (1 / 3) == 3.9999999999999996 (!) - # this is not a good way of checking cubes - assert v ** (1 / 3) % 1 == 0, f'{v} is not a cubed number' - return v - - -print(DemoModel(square_numbers=[1, 4, 9])) -print(DemoModel(square_numbers='1|4|16')) -print(DemoModel(square_numbers=[16], cube_numbers=[8, 27])) -try: - DemoModel(square_numbers=[1, 4, 2]) -except ValidationError as e: - print(e) - -try: - DemoModel(cube_numbers=[27, 27]) -except ValidationError as e: - print(e) diff --git a/docs/examples/validators_root.py b/docs/examples/validators_root.py deleted file mode 100644 index 381204f9c4..0000000000 --- a/docs/examples/validators_root.py +++ /dev/null @@ -1,36 +0,0 @@ -from pydantic import BaseModel, ValidationError, root_validator - - -class UserModel(BaseModel): - username: str - password1: str - password2: str - - @root_validator(pre=True) - def check_card_number_omitted(cls, values): - assert 'card_number' not in values, 'card_number should not be included' - return values - - @root_validator(skip_on_failure=True) - def check_passwords_match(cls, values): - pw1, pw2 = values.get('password1'), values.get('password2') - if pw1 is not None and pw2 is not None and pw1 != pw2: - raise ValueError('passwords do not match') - return values - - -print(UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn')) -try: - UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn2') -except ValidationError as e: - print(e) - -try: - UserModel( - username='scolvin', - password1='zxcvbn', - password2='zxcvbn', - card_number='1234', - ) -except ValidationError as e: - print(e) diff --git a/docs/examples/validators_simple.py b/docs/examples/validators_simple.py deleted file mode 100644 index 5dd5f8812a..0000000000 --- a/docs/examples/validators_simple.py +++ /dev/null @@ -1,44 +0,0 @@ -from pydantic import BaseModel, ValidationError, validator - - -class UserModel(BaseModel): - name: str - username: str - password1: str - password2: str - - @validator('name') - def name_must_contain_space(cls, v): - if ' ' not in v: - raise ValueError('must contain a space') - return v.title() - - @validator('password2') - def passwords_match(cls, v, values, **kwargs): - if 'password1' in values and v != values['password1']: - raise ValueError('passwords do not match') - return v - - @validator('username') - def username_alphanumeric(cls, v): - assert v.isalnum(), 'must be alphanumeric' - return v - - -user = UserModel( - name='samuel colvin', - username='scolvin', - password1='zxcvbn', - password2='zxcvbn', -) -print(user) - -try: - UserModel( - name='samuel', - username='scolvin', - password1='zxcvbn', - password2='zxcvbn2', - ) -except ValidationError as e: - print(e) diff --git a/docs/examples/validators_subclass_each_item.py b/docs/examples/validators_subclass_each_item.py deleted file mode 100644 index 4b050950d9..0000000000 --- a/docs/examples/validators_subclass_each_item.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -from pydantic import BaseModel, ValidationError, validator - - -class ParentModel(BaseModel): - names: List[str] - - -class ChildModel(ParentModel): - @validator('names', each_item=True) - def check_names_not_empty(cls, v): - assert v != '', 'Empty strings are not allowed.' - return v - - -# This will NOT raise a ValidationError because the validator was not called -try: - child = ChildModel(names=['Alice', 'Bob', 'Eve', '']) -except ValidationError as e: - print(e) -else: - print('No ValidationError caught.') - - -class ChildModel2(ParentModel): - @validator('names') - def check_names_not_empty(cls, v): - for name in v: - assert name != '', 'Empty strings are not allowed.' - return v - - -try: - child = ChildModel2(names=['Alice', 'Bob', 'Eve', '']) -except ValidationError as e: - print(e) diff --git a/docs/hypothesis_plugin.md b/docs/hypothesis_plugin.md index ad5aeeb9fd..f2c0bf391b 100644 --- a/docs/hypothesis_plugin.md +++ b/docs/hypothesis_plugin.md @@ -18,7 +18,35 @@ strategies support them without any user configuration. ### Example tests -{!.tmp_examples/hypothesis_property_based_test.md!} +```py +import typing + +from hypothesis import given +from hypothesis import strategies as st + +from pydantic import BaseModel, EmailStr, PaymentCardNumber, PositiveFloat + + +class Model(BaseModel): + card: PaymentCardNumber + price: PositiveFloat + users: typing.List[EmailStr] + + +@given(st.builds(Model)) +def test_property(instance): + # Hypothesis calls this test function many times with varied Models, + # so you can write a test that should pass given *any* instance. + assert 0 < instance.price + assert all('@' in email for email in instance.users) + + +@given(st.builds(Model, price=st.floats(100, 200))) +def test_with_discount(instance): + # This test shows how you can override specific fields, + # and let Hypothesis fill in any you don't care about. + assert 100 <= instance.price <= 200 +``` ### Use with JSON Schemas diff --git a/docs/index.md b/docs/index.md index 1a9801dd2b..8d6ff1dc4c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,7 +5,7 @@ [![downloads](https://pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) [![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE) -{!.version.md!} +{{ version }}. Pydantic is the most widely used data validation library for Python. @@ -17,7 +17,42 @@ Try Pydantic today! [Installation](/install/) is as simple as: [`pip install pyd ## Example -{!.tmp_examples/index_main.md!} +```py +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel + + +class User(BaseModel): + id: int + name: str = 'John Doe' + signup_ts: Optional[datetime] = None + friends: List[int] = [] + + +external_data = { + 'id': '123', + 'signup_ts': '2019-06-01 12:22', + 'friends': [1, 2, '3'], +} +user = User(**external_data) +print(user.id) +#> 123 +print(repr(user.signup_ts)) +#> datetime.datetime(2019, 6, 1, 12, 22) +print(user.friends) +#> [1, 2, 3] +print(user.model_dump()) +""" +{ + 'id': 123, + 'name': 'John Doe', + 'signup_ts': datetime.datetime(2019, 6, 1, 12, 22), + 'friends': [1, 2, 3], +} +""" +``` What's going on here: @@ -31,7 +66,40 @@ What's going on here: If validation fails, Pydantic will raise an error with a breakdown of what was wrong: -{!.tmp_examples/index_error.md!} +```py +from index_main import User + +# ignore-above +from pydantic import ValidationError + +try: + User(signup_ts='broken', friends=[1, 2, 'not number']) +except ValidationError as e: + print(e.errors()) + """ + [ + { + 'type': 'missing', + 'loc': ('id',), + 'msg': 'Field required', + 'input': {'signup_ts': 'broken', 'friends': [1, 2, 'not number']}, + }, + { + 'type': 'datetime_parsing', + 'loc': ('signup_ts',), + 'msg': 'Input should be a valid datetime, input is too short', + 'input': 'broken', + 'ctx': {'error': 'input is too short'}, + }, + { + 'type': 'int_parsing', + 'loc': ('friends', 2), + 'msg': 'Input should be a valid integer, unable to parse string as an integer', + 'input': 'not number', + }, + ] + """ +``` ## Rationale diff --git a/docs/mypy_plugin.md b/docs/mypy_plugin.md index c5275c3d77..3b1c0e723f 100644 --- a/docs/mypy_plugin.md +++ b/docs/mypy_plugin.md @@ -4,7 +4,26 @@ However, Pydantic also ships with a mypy plugin that adds a number of important features to mypy that improve its ability to type-check your code. For example, consider the following script: -{!.tmp_examples/mypy_main.md!} + +```py test="skip" +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel + + +class Model(BaseModel): + age: int + first_name = 'John' + last_name: Optional[str] = None + signup_ts: Optional[datetime] = None + list_of_ints: List[int] + + +m = Model(age=42, list_of_ints=[1, '2', b'3']) +print(m.middle_name) # not a model field! +Model() # will raise a validation error for age and list_of_ints +``` Without any special configuration, mypy catches one of the errors (see [here](usage/mypy.md) for usage instructions): diff --git a/docs/plugins/devtools_output.html b/docs/plugins/devtools_output.html new file mode 100644 index 0000000000..42cd1f5cbc --- /dev/null +++ b/docs/plugins/devtools_output.html @@ -0,0 +1,22 @@ + +devtools_example.py:31 <module> + user: User( + id=123, + name='John Doe', + signup_ts=datetime.datetime(2019, 6, 1, 12, 22), + friends=[ + 1234, + 4567, + 7890, + ], + address=Address( + street='Testing', + country='uk', + lat=51.5, + lng=0.0, + ), + ) (User) + +should be much easier read than: + +user: id=123 name='John Doe' signup_ts=datetime.datetime(2019, 6, 1, 12, 22) friends=[1234, 4567, 7890] address=Address(street='Testing', country='uk', lat=51.5, lng=0.0) diff --git a/docs/plugins/main.py b/docs/plugins/main.py new file mode 100644 index 0000000000..6411105ff0 --- /dev/null +++ b/docs/plugins/main.py @@ -0,0 +1,214 @@ +import json +import logging +import os +import re +from pathlib import Path +from textwrap import indent + +import autoflake # type: ignore +import pyupgrade._main as pyupgrade_main # type: ignore +import tomli +from mkdocs.config import Config +from mkdocs.structure.files import Files +from mkdocs.structure.pages import Page + +logger = logging.getLogger('mkdocs.plugin') +THIS_DIR = Path(__file__).parent +DOCS_DIR = THIS_DIR.parent +PROJECT_ROOT = DOCS_DIR.parent + + +def on_pre_build(config: Config) -> None: + """ + Before the build starts. + """ + add_changelog() + + +def on_files(files: Files, config: Config) -> Files: + """ + After the files are loaded, but before they are read. + """ + return files + + +def on_page_markdown(markdown: str, page: Page, config: Config, files: Files) -> str: + """ + Called on each file after it is read and before it is converted to HTML. + """ + markdown = upgrade_python(markdown) + markdown = remove_code_fence_attributes(markdown) + if md := add_version(markdown, page): + return md + elif md := build_schema_mappings(markdown, page): + return md + elif md := devtools_example(markdown, page): + return md + else: + return markdown + + +def add_changelog() -> None: + history = (PROJECT_ROOT / 'HISTORY.md').read_text() + history = re.sub(r'#(\d+)', r'[#\1](https://github.com/pydantic/pydantic/issues/\1)', history) + history = re.sub(r'(\s)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) + history = re.sub('@@', '@', history) + new_file = DOCS_DIR / 'changelog.md' + + # avoid writing file unless the content has changed to avoid infinite build loop + if not new_file.is_file() or new_file.read_text() != history: + new_file.write_text(history) + + +MIN_MINOR_VERSION = 7 +MAX_MINOR_VERSION = 11 + + +def upgrade_python(markdown: str) -> str: + """ + Apply pyupgrade to all python code blocks, unless explicitly skipped, create a tab for each version. + """ + + def add_tabs(match: re.Match[str]) -> str: + prefix = match.group(1) + if 'upgrade="skip"' in prefix: + return match.group(0) + + if m := re.search(r'requires="3.(\d+)"', prefix): + min_minor_version = int(m.group(1)) + else: + min_minor_version = MIN_MINOR_VERSION + + py_code = match.group(2) + output = [] + last_code = py_code + for minor_version in range(min_minor_version, MAX_MINOR_VERSION + 1): + if minor_version == min_minor_version: + tab_code = py_code + else: + tab_code = _upgrade_code(py_code, minor_version) + if tab_code == last_code: + continue + last_code = tab_code + + content = indent(f'{prefix}\n{tab_code}```', ' ' * 4) + output.append(f'=== "Python 3.{minor_version} and above"\n\n{content}') + + if len(output) == 1: + return match.group(0) + else: + return '\n\n'.join(output) + + return re.sub(r'^(``` *py.*?)\n(.+?)^```', add_tabs, markdown, flags=re.M | re.S) + + +def _upgrade_code(code: str, min_version: int) -> str: + upgraded = pyupgrade_main._fix_plugins( + code, + settings=pyupgrade_main.Settings( + min_version=(3, min_version), + keep_percent_format=True, + keep_mock=False, + keep_runtime_typing=True, + ), + ) + return autoflake.fix_code(upgraded, remove_all_unused_imports=True) + + +def remove_code_fence_attributes(markdown: str) -> str: + """ + There's no way to add attributes to code fences that works with both pycharm and mkdocs, hence we use + `py key="value"` to provide attributes to pytest-examples, then remove those attributes here. + + https://youtrack.jetbrains.com/issue/IDEA-297873 & https://python-markdown.github.io/extensions/fenced_code_blocks/ + """ + + def remove_attrs(match: re.Match[str]) -> str: + suffix = re.sub(r' (?:test|lint|upgrade|group|requires)=".+?"', '', match.group(2), flags=re.M) + return f'{match.group(1)}{suffix}' + + return re.sub(r'^( *``` *py)(.*)', remove_attrs, markdown, flags=re.M) + + +def add_version(markdown: str, page: Page) -> str | None: + if page.file.src_uri != 'index.md': + return None + + version_ref = os.getenv('GITHUB_REF') + if version_ref: + version = re.sub('^refs/tags/', '', version_ref.lower()) + version_str = f'Documentation for version: **{version}**' + else: + version_str = 'Documentation for development version' + markdown = re.sub(r'{{ *version *}}', version_str, markdown) + return markdown + + +headings = [ + 'Python type', + 'JSON Schema Type', + 'Additional JSON Schema', + 'Defined in', +] + + +def md2html(s: str) -> str: + return re.sub(r'`(.+?)`', r'\1', s) + + +def build_schema_mappings(markdown: str, page: Page) -> str | None: + if page.file.src_uri != 'usage/schema.md': + return None + + rows = [] + with (THIS_DIR / 'schema_mappings.toml').open('rb') as f: + table = tomli.load(f) + + for t in table.values(): + py_type = t['py_type'] + json_type = t['json_type'] + additional = t['additional'] + defined_in = t['defined_in'] + notes = t['notes'] + if additional and not isinstance(additional, str): + additional = json.dumps(additional) + cols = [ + f'{py_type}', + f'{json_type}', + f'{additional}' if additional else '', + md2html(defined_in), + ] + rows.append('\n'.join(f' \n {c}\n ' for c in cols)) + if notes: + rows.append( + f' \n' + f' {md2html(notes)}\n' + f' ' + ) + + heading = '\n'.join(f' {h}' for h in headings) + body = '\n\n\n'.join(rows) + table_text = f"""\ + + + +{heading} + + + + +{body} + + +
+""" + return re.sub(r'{{ *schema_mappings_table *}}', table_text, markdown) + + +def devtools_example(markdown: str, page: Page) -> str | None: + if page.file.src_uri != 'usage/devtools.md': + return None + + html = (THIS_DIR / 'devtools_output.html').read_text().strip('\n') + full_html = f'
\n
{html}
\n
' + return re.sub(r'{{ *devtools_example *}}', full_html, markdown) diff --git a/docs/plugins/schema_mappings.toml b/docs/plugins/schema_mappings.toml new file mode 100644 index 0000000000..24c1b3a9a9 --- /dev/null +++ b/docs/plugins/schema_mappings.toml @@ -0,0 +1,590 @@ +[None] +py_type = "None" +json_type = "null" +additional = "" +defined_in = "JSON Schema Core" +notes = "Same for `type(None)` or `Literal[None]`" + +[bool] +py_type = "bool" +json_type = "boolean" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[str] +py_type = "str" +json_type = "string" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[float] +py_type = "float" +json_type = "number" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[int] +py_type = "int" +json_type = "integer" +additional = "" +defined_in = "JSON Schema Validation" +notes = "" + +[dict] +py_type = "dict" +json_type = "object" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[list] +py_type = "list" +json_type = "array" +defined_in = "JSON Schema Core" +notes = "" +[list.additional.items] + +[tuple-positional] +py_type = "tuple-positional" +json_type = "array" +defined_in = "JSON Schema Core" +notes = "" +[tuple-positional.additional.items] + +[tuple-variable] +py_type = "tuple-variable" +json_type = "array" +defined_in = "JSON Schema Core" +notes = "" +[tuple-variable.additional.items] + +[set] +py_type = "set" +json_type = "array" +defined_in = "JSON Schema Validation" +notes = "" + +[set.additional] +uniqueItems = true + +[set.additional.items] + +[frozenset] +py_type = "frozenset" +json_type = "array" +defined_in = "JSON Schema Validation" +notes = "" + +[frozenset.additional] +uniqueItems = true + +[frozenset.additional.items] + +["List[str]"] +py_type = "List[str]" +json_type = "array" +defined_in = "JSON Schema Validation" +notes = "And equivalently for any other sub type, e.g. `List[int]`." +["List[str]".additional.items] +type = "string" + +["Tuple[str, ...]"] +py_type = "Tuple[str, ...]" +json_type = "array" +defined_in = "JSON Schema Validation" +notes = "And equivalently for any other sub type, e.g. `Tuple[int, ...]`." +["Tuple[str, ...]".additional.items] +type = "string" + +["Tuple[str, int]"] +py_type = "Tuple[str, int]" +json_type = "array" +defined_in = "JSON Schema Validation" +notes = "And equivalently for any other set of subtypes. Note: If using schemas for OpenAPI, you shouldn't use this declaration, as it would not be valid in OpenAPI (although it is valid in JSON Schema)." + +["Tuple[str, int]".additional] +minItems = 2 +maxItems = 2 + +[["Tuple[str, int]".additional.items]] +type = "string" + +[["Tuple[str, int]".additional.items]] +type = "integer" + +["Dict[str, int]"] +py_type = "Dict[str, int]" +json_type = "object" +defined_in = "JSON Schema Validation" +notes = "And equivalently for any other subfields for dicts. Have in mind that although you can use other types as keys for dicts with Pydantic, only strings are valid keys for JSON, and so, only str is valid as JSON Schema key types." +["Dict[str, int]".additional.additionalProperties] +type = "integer" + +["Union[str, int]"] +py_type = "Union[str, int]" +json_type = "anyOf" +defined_in = "JSON Schema Validation" +notes = "And equivalently for any other subfields for unions." +[["Union[str, int]".additional.anyOf]] +type = "string" + +[["Union[str, int]".additional.anyOf]] +type = "integer" + +[Enum] +py_type = "Enum" +json_type = "enum" +additional = "{\"enum\": [...]}" +defined_in = "JSON Schema Validation" +notes = "All the literal values in the enum are included in the definition." + +[SecretStr] +py_type = "SecretStr" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[SecretStr.additional] +writeOnly = true + +[SecretBytes] +py_type = "SecretBytes" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[SecretBytes.additional] +writeOnly = true + +[EmailStr] +py_type = "EmailStr" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[EmailStr.additional] +format = "email" + +[NameEmail] +py_type = "NameEmail" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[NameEmail.additional] +format = "name-email" + +[AnyUrl] +py_type = "AnyUrl" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[AnyUrl.additional] +format = "uri" + +[Pattern] +py_type = "Pattern" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[Pattern.additional] +format = "regex" + +[bytes] +py_type = "bytes" +json_type = "string" +defined_in = "OpenAPI" +notes = "" + +[bytes.additional] +format = "binary" + +[Decimal] +py_type = "Decimal" +json_type = "number" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[UUID1] +py_type = "UUID1" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[UUID1.additional] +format = "uuid1" + +[UUID3] +py_type = "UUID3" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[UUID3.additional] +format = "uuid3" + +[UUID4] +py_type = "UUID4" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[UUID4.additional] +format = "uuid4" + +[UUID5] +py_type = "UUID5" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[UUID5.additional] +format = "uuid5" + +[UUID] +py_type = "UUID" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "Suggested in OpenAPI." + +[UUID.additional] +format = "uuid" + +[FilePath] +py_type = "FilePath" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[FilePath.additional] +format = "file-path" + +[DirectoryPath] +py_type = "DirectoryPath" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[DirectoryPath.additional] +format = "directory-path" + +[Path] +py_type = "Path" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[Path.additional] +format = "path" + +[datetime] +py_type = "datetime" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[datetime.additional] +format = "date-time" + +[date] +py_type = "date" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[date.additional] +format = "date" + +[time] +py_type = "time" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[time.additional] +format = "time" + +[timedelta] +py_type = "timedelta" +json_type = "number" +defined_in = "Difference in seconds (a `float`), with Pydantic standard \"format\" extension" +notes = "Suggested in JSON Schema repository's issues by maintainer." + +[timedelta.additional] +format = "time-delta" + +[Json] +py_type = "Json" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[Json.additional] +format = "json-string" + +[IPv4Address] +py_type = "IPv4Address" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[IPv4Address.additional] +format = "ipv4" + +[IPv6Address] +py_type = "IPv6Address" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "" + +[IPv6Address.additional] +format = "ipv6" + +[IPvAnyAddress] +py_type = "IPvAnyAddress" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv4 or IPv6 address as used in `ipaddress` module" + +[IPvAnyAddress.additional] +format = "ipvanyaddress" + +[IPv4Interface] +py_type = "IPv4Interface" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv4 interface as used in `ipaddress` module" + +[IPv4Interface.additional] +format = "ipv4interface" + +[IPv6Interface] +py_type = "IPv6Interface" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv6 interface as used in `ipaddress` module" + +[IPv6Interface.additional] +format = "ipv6interface" + +[IPvAnyInterface] +py_type = "IPvAnyInterface" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv4 or IPv6 interface as used in `ipaddress` module" + +[IPvAnyInterface.additional] +format = "ipvanyinterface" + +[IPv4Network] +py_type = "IPv4Network" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv4 network as used in `ipaddress` module" + +[IPv4Network.additional] +format = "ipv4network" + +[IPv6Network] +py_type = "IPv6Network" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv6 network as used in `ipaddress` module" + +[IPv6Network.additional] +format = "ipv6network" + +[IPvAnyNetwork] +py_type = "IPvAnyNetwork" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "IPv4 or IPv6 network as used in `ipaddress` module" + +[IPvAnyNetwork.additional] +format = "ipvanynetwork" + +[StrictBool] +py_type = "StrictBool" +json_type = "boolean" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[StrictStr] +py_type = "StrictStr" +json_type = "string" +additional = "" +defined_in = "JSON Schema Core" +notes = "" + +[ConstrainedStr] +py_type = "ConstrainedStr" +json_type = "string" +additional = "" +defined_in = "JSON Schema Core" +notes = "If the type has values declared for the constraints, they are included as validations. See the mapping for `constr` below." + +["constr(regex='^text$', min_length=2, max_length=10)"] +py_type = "constr(regex='^text$', min_length=2, max_length=10)" +json_type = "string" +defined_in = "JSON Schema Validation" +notes = "Any argument not passed to the function (not defined) will not be included in the schema." + +["constr(regex='^text$', min_length=2, max_length=10)".additional] +pattern = "^text$" +minLength = 2 +maxLength = 10 + +[ConstrainedInt] +py_type = "ConstrainedInt" +json_type = "integer" +additional = "" +defined_in = "JSON Schema Core" +notes = "If the type has values declared for the constraints, they are included as validations. See the mapping for `conint` below." + +["conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)"] +py_type = "conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)" +json_type = "integer" +defined_in = "" +notes = "Any argument not passed to the function (not defined) will not be included in the schema." + +["conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)".additional] +maximum = 5 +exclusiveMaximum = 6 +minimum = 2 +exclusiveMinimum = 1 +multipleOf = 2 + +[PositiveInt] +py_type = "PositiveInt" +json_type = "integer" +defined_in = "JSON Schema Validation" +notes = "" + +[PositiveInt.additional] +exclusiveMinimum = 0 + +[NegativeInt] +py_type = "NegativeInt" +json_type = "integer" +defined_in = "JSON Schema Validation" +notes = "" + +[NegativeInt.additional] +exclusiveMaximum = 0 + +[NonNegativeInt] +py_type = "NonNegativeInt" +json_type = "integer" +defined_in = "JSON Schema Validation" +notes = "" + +[NonNegativeInt.additional] +minimum = 0 + +[NonPositiveInt] +py_type = "NonPositiveInt" +json_type = "integer" +defined_in = "JSON Schema Validation" +notes = "" + +[NonPositiveInt.additional] +maximum = 0 + +[ConstrainedFloat] +py_type = "ConstrainedFloat" +json_type = "number" +additional = "" +defined_in = "JSON Schema Core" +notes = "If the type has values declared for the constraints, they are included as validations. See the mapping for `confloat` below." + +["confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)"] +py_type = "confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "Any argument not passed to the function (not defined) will not be included in the schema." + +["confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)".additional] +maximum = 5 +exclusiveMaximum = 6 +minimum = 2 +exclusiveMinimum = 1 +multipleOf = 2 + +[PositiveFloat] +py_type = "PositiveFloat" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "" + +[PositiveFloat.additional] +exclusiveMinimum = 0 + +[NegativeFloat] +py_type = "NegativeFloat" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "" + +[NegativeFloat.additional] +exclusiveMaximum = 0 + +[NonNegativeFloat] +py_type = "NonNegativeFloat" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "" + +[NonNegativeFloat.additional] +minimum = 0 + +[NonPositiveFloat] +py_type = "NonPositiveFloat" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "" + +[NonPositiveFloat.additional] +maximum = 0 + +[ConstrainedDecimal] +py_type = "ConstrainedDecimal" +json_type = "number" +additional = "" +defined_in = "JSON Schema Core" +notes = "If the type has values declared for the constraints, they are included as validations. See the mapping for `condecimal` below." + +["condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)"] +py_type = "condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)" +json_type = "number" +defined_in = "JSON Schema Validation" +notes = "Any argument not passed to the function (not defined) will not be included in the schema." + +["condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)".additional] +maximum = 5 +exclusiveMaximum = 6 +minimum = 2 +exclusiveMinimum = 1 +multipleOf = 2 + +[BaseModel] +py_type = "BaseModel" +json_type = "object" +additional = "" +defined_in = "JSON Schema Core" +notes = "All the properties defined will be defined with standard JSON Schema, including submodels." + +[Color] +py_type = "Color" +json_type = "string" +defined_in = "Pydantic standard \"format\" extension" +notes = "" + +[Color.additional] +format = "color" diff --git a/docs/usage/dataclasses.md b/docs/usage/dataclasses.md index c3be630d73..11d594263e 100644 --- a/docs/usage/dataclasses.md +++ b/docs/usage/dataclasses.md @@ -1,7 +1,23 @@ If you don't want to use _pydantic_'s `BaseModel` you can instead get the same data validation on standard [dataclasses](https://docs.python.org/3/library/dataclasses.html) (introduced in Python 3.7). -{!.tmp_examples/dataclasses_main.md!} +```py +from datetime import datetime + +from pydantic.dataclasses import dataclass + + +@dataclass +class User: + id: int + name: str = 'John Doe' + signup_ts: datetime = None + + +user = User(id='42', signup_ts='2032-06-21T12:00') +print(user) +#> User(id=42, name='John Doe', signup_ts=datetime.datetime(2032, 6, 21, 12, 0)) +``` !!! note Keep in mind that `pydantic.dataclasses.dataclass` is a drop-in replacement for `dataclasses.dataclass` @@ -17,7 +33,30 @@ created by the standard library `dataclass` decorator. The underlying model and its schema can be accessed through `__pydantic_model__`. Also, fields that require a `default_factory` can be specified by either a `pydantic.Field` or a `dataclasses.field`. -{!.tmp_examples/dataclasses_default_schema.md!} +```py +import dataclasses +from typing import List, Optional + +from pydantic import Field +from pydantic.dataclasses import dataclass + + +@dataclass +class User: + id: int + name: str = 'John Doe' + friends: List[int] = dataclasses.field(default_factory=lambda: [0]) + age: Optional[int] = dataclasses.field( + default=None, + metadata=dict(title='The age of the user', description='do not lie!'), + ) + height: Optional[int] = Field(None, title='The height in cm', ge=50, le=300) + + +user = User(id='42') +# TODO use methods! +# print(user.__pydantic_model__.model_json_schema()) +``` `pydantic.dataclasses.dataclass`'s arguments are the same as the standard decorator, except one extra keyword argument `config` which has the same meaning as [Config](model_config.md). @@ -32,7 +71,24 @@ For more information about combining validators with dataclasses, see If you want to modify the `Config` like you would with a `BaseModel`, you have three options: -{!.tmp_examples/dataclasses_config.md!} +```py +from pydantic import ConfigDict +from pydantic.dataclasses import dataclass + + +# Option 1 - use directly a dict +# Note: `mypy` will still raise typo error +@dataclass(config=dict(validate_assignment=True)) +class MyDataclass1: + a: int + + +# Option 2 - use `ConfigDict` +# (same as before at runtime since it's a `TypedDict` but with intellisense) +@dataclass(config=ConfigDict(validate_assignment=True)) +class MyDataclass2: + a: int +``` !!! warning After v1.10, _pydantic_ dataclasses support `Config.extra` but some default behaviour of stdlib dataclasses @@ -44,35 +100,72 @@ If you want to modify the `Config` like you would with a `BaseModel`, you have t Nested dataclasses are supported both in dataclasses and normal models. -{!.tmp_examples/dataclasses_nested.md!} +```py +from pydantic import AnyUrl +from pydantic.dataclasses import dataclass -Dataclasses attributes can be populated by tuples, dictionaries or instances of the dataclass itself. -## Stdlib dataclasses and _pydantic_ dataclasses +@dataclass +class NavbarButton: + href: AnyUrl + -### Convert stdlib dataclasses into _pydantic_ dataclasses +@dataclass +class Navbar: + button: NavbarButton -Stdlib dataclasses (nested or not) can be easily converted into _pydantic_ dataclasses by just decorating -them with `pydantic.dataclasses.dataclass`. -_Pydantic_ will enhance the given stdlib dataclass but won't alter the default behaviour (i.e. without validation). -It will instead create a wrapper around it to trigger validation that will act like a plain proxy. -The stdlib dataclass can still be accessed via the `__dataclass__` attribute (see example below). -{!.tmp_examples/dataclasses_stdlib_to_pydantic.md!} +navbar = Navbar(button={'href': 'https://example.com'}) +print(navbar) +#> Navbar(button=NavbarButton(href=Url('https://example.com/'))) +``` -### Choose when to trigger validation +Dataclasses attributes can be populated by tuples, dictionaries or instances of the dataclass itself. -As soon as your stdlib dataclass has been decorated with _pydantic_ dataclass decorator, magic methods have been -added to validate input data. If you want, you can still keep using your dataclass and choose when to trigger it. +## Stdlib dataclasses and _pydantic_ dataclasses -{!.tmp_examples/dataclasses_stdlib_run_validation.md!} +**TODO this all needs re-doing since most things now work.** ### Inherit from stdlib dataclasses Stdlib dataclasses (nested or not) can also be inherited and _pydantic_ will automatically validate all the inherited fields. -{!.tmp_examples/dataclasses_stdlib_inheritance.md!} +```py +import dataclasses + +import pydantic + + +@dataclasses.dataclass +class Z: + z: int + + +@dataclasses.dataclass +class Y(Z): + y: int = 0 + + +@pydantic.dataclasses.dataclass +class X(Y): + x: int = 0 + + +foo = X(x=b'1', y='2', z='3') +print(foo) +#> X(z=3, y=2) + +try: + X(z='pika') +except pydantic.ValidationError as e: + print(e) + """ + 1 validation error for X + z + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='pika', input_type=str] + """ +``` ### Use of stdlib dataclasses with `BaseModel` @@ -80,7 +173,54 @@ Bear in mind that stdlib dataclasses (nested or not) are **automatically convert dataclasses when mixed with `BaseModel`! Furthermore the generated _pydantic_ dataclass will have the **exact same configuration** (`order`, `frozen`, ...) as the original one. -{!.tmp_examples/dataclasses_stdlib_with_basemodel.md!} +```py +import dataclasses +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, ValidationError + + +@dataclasses.dataclass(frozen=True) +class User: + name: str + + +@dataclasses.dataclass +class File: + filename: str + last_modification_time: Optional[datetime] = None + + +class Foo(BaseModel): + file: File + user: Optional[User] = None + + +file = File( + filename=['not', 'a', 'string'], + last_modification_time='2020-01-01T00:00', +) # nothing is validated as expected +print(file) +#> File(filename=['not', 'a', 'string'], last_modification_time='2020-01-01T00:00') + +try: + Foo(file=file) +except ValidationError as e: + print(e) + """ + 1 validation error for Foo + file -> filename + Input should be a valid string [type=string_type, input_value=['not', 'a', 'string'], input_type=list] + """ + +foo = Foo(file=File(filename='myfile'), user=User(name='pika')) +try: + foo.user.name = 'bulbi' +except dataclasses.FrozenInstanceError as e: + print(e) + #> cannot assign to field 'name' +``` ### Use custom types @@ -88,10 +228,59 @@ Since stdlib dataclasses are automatically converted to add validation using custom types may cause some unexpected behaviour. In this case you can simply add `arbitrary_types_allowed` in the config! -{!.tmp_examples/dataclasses_arbitrary_types_allowed.md!} +```py +import dataclasses + +import pydantic +from pydantic.errors import PydanticSchemaGenerationError + + +class ArbitraryType: + def __init__(self, value): + self.value = value + + def __repr__(self): + return f'ArbitraryType(value={self.value!r})' + + +@dataclasses.dataclass +class DC: + a: ArbitraryType + b: str + + +# valid as it is a builtin dataclass without validation +my_dc = DC(a=ArbitraryType(value=3), b='qwe') + +try: + + class Model(pydantic.BaseModel): + dc: DC + other: str + + Model(dc=my_dc, other='other') +except PydanticSchemaGenerationError as e: # invalid as it is now a pydantic dataclass + print(e) + """ + Unable to generate pydantic-core schema for . Setting `arbitrary_types_allowed=True` in the model_config may prevent this error. + """ + + +class Model(pydantic.BaseModel): + model_config = dict(arbitrary_types_allowed=True) + dc: DC + other: str + + +m = Model(dc=my_dc, other='other') +print(repr(m)) +#> Model(dc=DC(a=ArbitraryType(value=3), b='qwe'), other='other') +``` ## Initialize hooks +**TODO this has changed!** + When you initialize a dataclass, it is possible to execute code *after* validation with the help of `__post_init_post_parse__`. This is not the same as `__post_init__`, which executes code *before* validation. @@ -101,12 +290,53 @@ code *before* validation. be done before. In this case you can set `Config.post_init_call = 'after_validation'` -{!.tmp_examples/dataclasses_post_init_post_parse.md!} +```py +from pydantic.dataclasses import dataclass + + +@dataclass +class Birth: + year: int + month: int + day: int + + +@dataclass +class User: + birth: Birth + + def __post_init__(self): + print(self.birth) + #> Birth(year=1995, month=3, day=2) + + +user = User(**{'birth': {'year': 1995, 'month': 3, 'day': 2}}) +``` + +```py requires="3.8" +from dataclasses import InitVar +from pathlib import Path +from typing import Optional + +from pydantic.dataclasses import dataclass + + +@dataclass +class PathData: + path: Path + base_path: InitVar[Optional[Path]] + + def __post_init__(self, base_path): + print(f'Received path={self.path!r}, base_path={base_path!r}') + #> Received path=PosixPath('world'), base_path=PosixPath('/hello') + if base_path is not None: + self.path = base_path / self.path -Since version **v1.0**, any fields annotated with `dataclasses.InitVar` are passed to both `__post_init__` *and* -`__post_init_post_parse__`. -{!.tmp_examples/dataclasses_initvars.md!} +path_data = PathData('world', base_path='/hello') +# Received path='world', base_path='/hello' +assert path_data.path == Path('/hello/world') +``` ### Difference with stdlib dataclasses @@ -118,4 +348,31 @@ When substituting usage of `dataclasses.dataclass` with `pydantic.dataclasses.da _Pydantic_ dataclasses do not feature a `.json()` function. To dump them as JSON, you will need to make use of the `pydantic_encoder` as follows: -{!.tmp_examples/dataclasses_json_dumps.md!} +```py +import dataclasses +import json +from typing import List + +from pydantic.dataclasses import dataclass +from pydantic.json import pydantic_encoder + + +@dataclass +class User: + id: int + name: str = 'John Doe' + friends: List[int] = dataclasses.field(default_factory=lambda: [0]) + + +user = User(id='42') +print(json.dumps(user, indent=4, default=pydantic_encoder)) +""" +{ + "id": 42, + "name": "John Doe", + "friends": [ + 0 + ] +} +""" +``` diff --git a/docs/usage/devtools.md b/docs/usage/devtools.md index 3253ba224e..5e010e17b9 100644 --- a/docs/usage/devtools.md +++ b/docs/usage/devtools.md @@ -11,8 +11,42 @@ is on and what value was printed. In particular `debug()` is useful when inspecting models: -{!.tmp_examples/devtools_main.md!} +```py test="no-print-intercept" +from datetime import datetime +from typing import List + +from devtools import debug + +from pydantic import BaseModel + + +class Address(BaseModel): + street: str + country: str + lat: float + lng: float + + +class User(BaseModel): + id: int + name: str + signup_ts: datetime + friends: List[int] + address: Address + + +user = User( + id='123', + name='John Doe', + signup_ts='2019-06-01 12:22', + friends=[1234, 4567, 7890], + address=dict(street='Testing', country='uk', lat=51.5, lng=0), +) +debug(user) +print('\nshould be much easier read than:\n') +print('user:', user) +``` Will output in your terminal: -{!.tmp_examples/devtools_main.html!} +{{ devtools_example }} diff --git a/docs/usage/exporting_models.md b/docs/usage/exporting_models.md index bf5242c6c2..e08f2f7c83 100644 --- a/docs/usage/exporting_models.md +++ b/docs/usage/exporting_models.md @@ -20,7 +20,30 @@ Arguments: Example: -{!.tmp_examples/exporting_models_dict.md!} +```py +from pydantic import BaseModel + + +class BarModel(BaseModel): + whatever: int + + +class FooBarModel(BaseModel): + banana: float + foo: str + bar: BarModel + + +m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) + +# returns a dictionary: +print(m.model_dump()) +#> {'banana': 3.14, 'foo': 'hello', 'bar': {'whatever': 123}} +print(m.model_dump(include={'foo', 'bar'})) +#> {'foo': 'hello', 'bar': {'whatever': 123}} +print(m.model_dump(exclude={'foo', 'bar'})) +#> {'banana': 3.14} +``` ## `dict(model)` and iteration @@ -30,7 +53,30 @@ returned, so sub-models will not be converted to dictionaries. Example: -{!.tmp_examples/exporting_models_iterate.md!} +```py +from pydantic import BaseModel + + +class BarModel(BaseModel): + whatever: int + + +class FooBarModel(BaseModel): + banana: float + foo: str + bar: BarModel + + +m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) + +print(dict(m)) +#> {'banana': 3.14, 'foo': 'hello', 'bar': BarModel(whatever=123)} +for name, value in m: + print(f'{name}: {value}') + #> banana: 3.14 + #> foo: hello + #> bar: whatever=123 +``` ## `model.copy(...)` @@ -45,7 +91,34 @@ Arguments: Example: -{!.tmp_examples/exporting_models_copy.md!} +```py +from pydantic import BaseModel + + +class BarModel(BaseModel): + whatever: int + + +class FooBarModel(BaseModel): + banana: float + foo: str + bar: BarModel + + +m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) + +# TODO! +# print(m.model_copy(include={'foo', 'bar'})) +# print(m.model_copy(exclude={'foo', 'bar'})) +print(m.model_copy(update={'banana': 0})) +#> banana=0 foo='hello' bar=BarModel(whatever=123) +print(id(m.bar) == id(m.model_copy().bar)) +#> True +# normal copy gives the same object reference for bar +print(id(m.bar) == id(m.model_copy(deep=True).bar)) +#> False +# deep copy gives a new object reference for `bar` +``` ## `model.model_dump_json(...)` @@ -71,22 +144,54 @@ Arguments: *pydantic* can serialise many commonly used types to JSON (e.g. `datetime`, `date` or `UUID`) which would normally fail with a simple `json.dumps(foobar)`. -{!.tmp_examples/exporting_models_json.md!} +```py +from datetime import datetime + +from pydantic import BaseModel + + +class BarModel(BaseModel): + whatever: int + + +class FooBarModel(BaseModel): + foo: datetime + bar: BarModel + + +m = FooBarModel(foo=datetime(2032, 6, 1, 12, 13, 14), bar={'whatever': 123}) +print(m.model_dump_json()) +#> b'{"foo":"2032-06-01T12:13:14","bar":{"whatever":123}}' +``` ### `json_encoders` Serialisation can be customised on a model using the `json_encoders` config property; the keys should be types (or names of types for forward references), and the values should be functions which serialise that type (see the example below): -{!.tmp_examples/exporting_models_json_encoders.md!} +```py +from datetime import datetime, timedelta -By default, `timedelta` is encoded as a simple float of total seconds. The `timedelta_isoformat` is provided -as an optional alternative which implements [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time diff encoding. +from pydantic import BaseModel, serializer + + +class WithCustomEncoders(BaseModel): + model_config = dict(ser_json_timedelta='iso8601') + dt: datetime + diff: timedelta + + @serializer('dt') + def serialize_dt(self, dt: datetime, _info): + return dt.timestamp() -The `json_encoders` are also merged during the models inheritance with the child -encoders taking precedence over the parent one. -{!.tmp_examples/exporting_models_json_encoders_merge.md!} +m = WithCustomEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) +print(m.model_dump_json()) +#> b'{"dt":1969660800.0,"diff":"P4DT14400S"}' +``` + +By default, `timedelta` is encoded as a simple float of total seconds. The `timedelta_isoformat` is provided +as an optional alternative which implements [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time diff encoding. ### Serialising self-reference or other models @@ -94,7 +199,44 @@ By default, models are serialised as dictionaries. If you want to serialise them differently, you can add `models_as_dict=False` when calling `json()` method and add the classes of the model in `json_encoders`. In case of forward references, you can use a string with the class name instead of the class itself -{!.tmp_examples/exporting_models_json_forward_ref.md!} + +```py test="skip" +# TODO we need to serializers for this example +from typing import List, Optional + +from pydantic import BaseModel + + +class Address(BaseModel): + city: str + country: str + + +class User(BaseModel): + name: str + address: Address + friends: Optional[List['User']] = None + + class Config: + json_encoders = { + Address: lambda a: f'{a.city} ({a.country})', + 'User': lambda u: f'{u.name} in {u.address.city} ' + f'({u.address.country[:2].upper()})', + } + + +User.update_forward_refs() + +wolfgang = User( + name='Wolfgang', + address=Address(city='Berlin', country='Deutschland'), + friends=[ + User(name='Pierre', address=Address(city='Paris', country='France')), + User(name='John', address=Address(city='London', country='UK')), + ], +) +print(wolfgang.model_dump_json(models_as_dict=False)) +``` ### Serialising subclasses @@ -105,37 +247,104 @@ In case of forward references, you can use a string with the class name instead Subclasses of common types are automatically encoded like their super-classes: -{!.tmp_examples/exporting_models_json_subclass.md!} +```py +from datetime import date, timedelta + +from pydantic_core import core_schema + +from pydantic import BaseModel + -### Custom JSON (de)serialisation +class DayThisYear(date): + """ + Contrived example of a special type of date that + takes an int and interprets it as a day in the current year + """ -To improve the performance of encoding and decoding JSON, alternative JSON implementations -(e.g. [ujson](https://pypi.python.org/pypi/ujson)) can be used via the -`json_loads` and `json_dumps` properties of `Config`. + @classmethod + def __get_pydantic_core_schema__(cls, **kwargs): + return core_schema.general_after_validator_function( + cls.validate, + core_schema.int_schema(), + serialization=core_schema.format_ser_schema('%Y-%m-%d'), + ) -{!.tmp_examples/exporting_models_ujson.md!} + @classmethod + def validate(cls, v: int, _info): + return date.today().replace(month=1, day=1) + timedelta(days=v) -`ujson` generally cannot be used to dump JSON since it doesn't support encoding of objects like datetimes and does -not accept a `default` fallback function argument. To do this, you may use another library like -[orjson](https://github.com/ijl/orjson). -{!.tmp_examples/exporting_models_orjson.md!} +class FooModel(BaseModel): + date: DayThisYear -Note that `orjson` takes care of `datetime` encoding natively, making it faster than `json.dumps` but -meaning you cannot always customise the encoding using `Config.json_encoders`. + +m = FooModel(date=300) +print(m.model_dump_json()) +#> b'{"date":"2023-10-28"}' +``` ## `pickle.dumps(model)` Using the same plumbing as `copy()`, *pydantic* models support efficient pickling and unpickling. -{!.tmp_examples/exporting_models_pickle.md!} +```py test="skip" +# TODO need to get pickling to work +import pickle + +from pydantic import BaseModel + + +class FooBarModel(BaseModel): + a: str + b: int + + +m = FooBarModel(a='hello', b=123) +print(m) +data = pickle.dumps(m) +print(data) +m2 = pickle.loads(data) +print(m2) +``` ## Advanced include and exclude The `dict`, `json`, and `copy` methods support `include` and `exclude` arguments which can either be sets or dictionaries. This allows nested selection of which fields to export: -{!.tmp_examples/exporting_models_exclude1.md!} +```py +from pydantic import BaseModel, SecretStr + + +class User(BaseModel): + id: int + username: str + password: SecretStr + + +class Transaction(BaseModel): + id: str + user: User + value: int + + +t = Transaction( + id='1234567890', + user=User(id=42, username='JohnDoe', password='hashedpassword'), + value=9876543210, +) + +# using a set: +print(t.model_dump(exclude={'user', 'value'})) +#> {'id': '1234567890'} + +# using a dict: +print(t.model_dump(exclude={'user': {'username', 'password'}, 'value': True})) +#> {'id': '1234567890', 'user': {'id': 42}} + +print(t.model_dump(include={'id': True, 'user': {'id'}})) +#> {'id': '1234567890', 'user': {'id': 42}} +``` The `True` indicates that we want to exclude or include an entire key, just as if we included it in a set. Of course, the same can be done at any depth level. @@ -144,7 +353,93 @@ Special care must be taken when including or excluding fields from a list or tup `dict` and related methods expect integer keys for element-wise inclusion or exclusion. To exclude a field from **every** member of a list or tuple, the dictionary key `'__all__'` can be used as follows: -{!.tmp_examples/exporting_models_exclude2.md!} +```py +import datetime +from typing import List + +from pydantic import BaseModel, SecretStr + + +class Country(BaseModel): + name: str + phone_code: int + + +class Address(BaseModel): + post_code: int + country: Country + + +class CardDetails(BaseModel): + number: SecretStr + expires: datetime.date + + +class Hobby(BaseModel): + name: str + info: str + + +class User(BaseModel): + first_name: str + second_name: str + address: Address + card_details: CardDetails + hobbies: List[Hobby] + + +user = User( + first_name='John', + second_name='Doe', + address=Address(post_code=123456, country=Country(name='USA', phone_code=1)), + card_details=CardDetails( + number='4212934504460000', expires=datetime.date(2020, 5, 1) + ), + hobbies=[ + Hobby(name='Programming', info='Writing code and stuff'), + Hobby(name='Gaming', info='Hell Yeah!!!'), + ], +) + +exclude_keys = { + 'second_name': True, + 'address': {'post_code': True, 'country': {'phone_code'}}, + 'card_details': True, + # You can exclude fields from specific members of a tuple/list by index: + 'hobbies': {-1: {'info'}}, +} + +include_keys = { + 'first_name': True, + 'address': {'country': {'name'}}, + 'hobbies': {0: True, -1: {'name'}}, +} + +# would be the same as user.model_dump(exclude=exclude_keys) in this case: +print(user.model_dump(include=include_keys)) +""" +{ + 'first_name': 'John', + 'address': {'country': {'name': 'USA'}}, + 'hobbies': [{'name': 'Programming', 'info': 'Writing code and stuff'}], +} +""" + +# To exclude a field from all members of a nested list or tuple, use "__all__": +print(user.model_dump(exclude={'hobbies': {'__all__': {'info'}}})) +""" +{ + 'first_name': 'John', + 'second_name': 'Doe', + 'address': {'post_code': 123456, 'country': {'name': 'USA', 'phone_code': 1}}, + 'card_details': { + 'number': SecretStr('**********'), + 'expires': datetime.date(2020, 5, 1), + }, + 'hobbies': [{'name': 'Programming'}, {'name': 'Gaming'}], +} +""" +``` The same holds for the `json` and `copy` methods. @@ -152,7 +447,32 @@ The same holds for the `json` and `copy` methods. In addition to the explicit arguments `exclude` and `include` passed to `dict`, `json` and `copy` methods, we can also pass the `include`/`exclude` arguments directly to the `Field` constructor or the equivalent `field` entry in the models `Config` class: -{!.tmp_examples/exporting_models_exclude3.md!} +```py +from pydantic import BaseModel, Field, SecretStr + + +class User(BaseModel): + id: int + username: str + password: SecretStr = Field(..., exclude=True) + + +class Transaction(BaseModel): + id: str + user: User = Field(exclude={'username'}) + value: int = Field(exclude=True) + + +t = Transaction( + id='1234567890', + user=User(id=42, username='JohnDoe', password='hashedpassword'), + value=9876543210, +) + +print(t.model_dump()) +#> {'id': '1234567890'} +# TODO this is wrong! not all of "user" should be excluded +``` In the case where multiple strategies are used, `exclude`/`include` fields are merged according to the following rules: @@ -163,8 +483,56 @@ Note that while merging settings, `exclude` entries are merged by computing the The resulting merged exclude settings: -{!.tmp_examples/exporting_models_exclude4.md!} +```py +from pydantic import BaseModel, Field, SecretStr + + +class User(BaseModel): + id: int + username: str # overridden by explicit exclude + password: SecretStr = Field(exclude=True) + + +class Transaction(BaseModel): + id: str + user: User + value: int + + +t = Transaction( + id='1234567890', + user=User(id=42, username='JohnDoe', password='hashedpassword'), + value=9876543210, +) + +print(t.model_dump(exclude={'value': True, 'user': {'username'}})) +#> {'id': '1234567890', 'user': {'id': 42}} +``` are the same as using merged include settings as follows: -{!.tmp_examples/exporting_models_exclude5.md!} +```py +from pydantic import BaseModel, Field, SecretStr + + +class User(BaseModel): + id: int = Field(..., include=True) + username: str = Field(..., include=True) # overridden by explicit include + password: SecretStr + + +class Transaction(BaseModel): + id: str + user: User + value: int + + +t = Transaction( + id='1234567890', + user=User(id=42, username='JohnDoe', password='hashedpassword'), + value=9876543210, +) + +print(t.model_dump(include={'id': True, 'user': {'id'}})) +#> {'id': '1234567890', 'user': {'id': 42}} +``` diff --git a/docs/usage/model_config.md b/docs/usage/model_config.md index e507bf5421..a169bf0a9a 100644 --- a/docs/usage/model_config.md +++ b/docs/usage/model_config.md @@ -1,12 +1,66 @@ Behaviour of _pydantic_ can be controlled via the `Config` class on a model or a _pydantic_ dataclass. -{!.tmp_examples/model_config_main.md!} +```py +from pydantic import BaseModel, ConfigDict, ValidationError + + +class Model(BaseModel): + model_config = ConfigDict(str_max_length=10) + v: str + + +try: + m = Model(v='x' * 20) +except ValidationError as e: + print(e) + """ + 1 validation error for Model + v + String should have at most 10 characters [type=string_too_long, input_value='xxxxxxxxxxxxxxxxxxxx', input_type=str] + """ +``` Also, you can specify config options as model class kwargs: -{!.tmp_examples/model_config_class_kwargs.md!} +```py +from pydantic import BaseModel, Extra, ValidationError + + +class Model(BaseModel, extra=Extra.forbid): + a: str + + +try: + Model(a='spam', b='oh no') +except ValidationError as e: + print(e) + """ + 1 validation error for Model + b + Extra inputs are not permitted [type=extra_forbidden, input_value='oh no', input_type=str] + """ +``` Similarly, if using the `@dataclass` decorator: -{!.tmp_examples/model_config_dataclass.md!} +```py +from datetime import datetime + +from pydantic import ValidationError +from pydantic.dataclasses import dataclass + + +@dataclass(config=dict(str_max_length=10, validate_assignment=True)) +class User: + id: int + name: str = 'John Doe' + signup_ts: datetime = None + + +user = User(id='42', signup_ts='2032-06-21T12:00') +try: + user.name = 'x' * 20 +except ValidationError as e: + print(e) +``` ## Options @@ -140,14 +194,47 @@ with the following means (see [#4093](https://github.com/pydantic/pydantic/pull/ If you wish to change the behaviour of _pydantic_ globally, you can create your own custom `BaseModel` with custom `Config` since the config is inherited -{!.tmp_examples/model_config_change_globally_custom.md!} +```py +from pydantic import BaseModel as PydanticBaseModel + + +class BaseModel(PydanticBaseModel): + model_config = dict(arbitrary_types_allowed=True) + + +class MyClass: + """A random class""" + + +class Model(BaseModel): + x: MyClass +``` ## Alias Generator If data source field names do not match your code style (e. g. CamelCase fields), you can automatically generate aliases using `alias_generator`: -{!.tmp_examples/model_config_alias_generator.md!} +```py +from pydantic import BaseModel + + +def to_camel(string: str) -> str: + return ''.join(word.capitalize() for word in string.split('_')) + + +class Voice(BaseModel): + model_config = dict(alias_generator=to_camel) + name: str + language_code: str + + +voice = Voice(Name='Filiz', LanguageCode='tr-TR') +print(voice.language_code) +#> tr-TR +print(voice.model_dump(by_alias=True)) +#> {'Name': 'Filiz', 'LanguageCode': 'tr-TR'} +``` Here camel case refers to ["upper camel case"](https://en.wikipedia.org/wiki/Camel_case) aka pascal case e.g. `CamelCase`. If you'd like instead to use lower camel case e.g. `camelCase`, @@ -175,22 +262,116 @@ the selected value is determined as follows (in descending order of priority): For example: -{!.tmp_examples/model_config_alias_precedence.md!} +```py +from pydantic import BaseModel, Field + + +class Voice(BaseModel): + name: str = Field(None, alias='ActorName') + language_code: str = None + mood: str = None + + +def alias_generator(string: str) -> str: + # this is the same as `alias_generator = to_camel` above + return ''.join(word.capitalize() for word in string.split('_')) + + +class Character(Voice): + model_config = dict(alias_generator=alias_generator) + act: int = 1 + + +print(Character.model_json_schema(by_alias=True)) +""" +{ + 'type': 'object', + 'properties': { + 'ActorName': {'type': 'string', 'default': None, 'title': 'Actorname'}, + 'LanguageCode': {'type': 'string', 'default': None, 'title': 'Languagecode'}, + 'Mood': {'type': 'string', 'default': None, 'title': 'Mood'}, + 'Act': {'type': 'integer', 'default': 1, 'title': 'Act'}, + }, + 'title': 'Character', +} +""" +``` ## Smart Union +**TODO: Smart Union behaviour has roughly become the default, this needs to be moved to the stuff on unions** + By default, as explained [here](types.md#unions), _pydantic_ tries to validate (and coerce if it can) in the order of the `Union`. So sometimes you may have unexpected coerced data. -{!.tmp_examples/model_config_smart_union_off.md!} +```py +from typing import Union + +from pydantic import BaseModel + + +class Foo(BaseModel): + pass + + +class Bar(BaseModel): + pass + + +class Model(BaseModel): + x: Union[str, int] + y: Union[Foo, Bar] + + +print(Model(x=1, y=Bar())) +#> x=1 y=Bar() +``` To prevent this, you can enable `Config.smart_union`. _Pydantic_ will then check all allowed types before even trying to coerce. Know that this is of course slower, especially if your `Union` is quite big. -{!.tmp_examples/model_config_smart_union_on.md!} +```py +from typing import Union + +from pydantic import BaseModel + + +class Foo(BaseModel): + pass + + +class Bar(BaseModel): + pass + + +class Model(BaseModel): + x: Union[str, int] + y: Union[Foo, Bar] + + +print(Model(x=1, y=Bar())) +#> x=1 y=Bar() +``` !!! warning Note that this option **does not support compound types yet** (e.g. differentiate `List[int]` and `List[str]`). This option will be improved further once a strict mode is added in _pydantic_ and will probably be the default behaviour in v2! -{!.tmp_examples/model_config_smart_union_on_edge_case.md!} +```py +from typing import List, Union + +from pydantic import BaseModel + + +class Model(BaseModel): + x: Union[List[str], List[int]] + + +# Expected coercion +print(Model(x=[1, '2'])) +#> x=[1, 2] + +# Unexpected coercion +print(Model(x=[1, 2])) +#> x=[1, 2] +``` diff --git a/docs/usage/models.md b/docs/usage/models.md index bd79334cb0..6142dbb014 100644 --- a/docs/usage/models.md +++ b/docs/usage/models.md @@ -20,47 +20,60 @@ of the resultant model instance will conform to the field types defined on the m ## Basic model usage -```py +```py group="basic-model" from pydantic import BaseModel + class User(BaseModel): id: int - name = 'Jane Doe' + name: str = 'Jane Doe' ``` + `User` here is a model with two fields `id` which is an integer and is required, and `name` which is a string and is not required (it has a default value). The type of `name` is inferred from the default value, and so a type annotation is not required (however note [this](#field-ordering) warning about field order when some fields do not have type annotations). -```py + +```py group="basic-model" user = User(id='123') -user_x = User(id='123.45') ``` + `user` here is an instance of `User`. Initialisation of the object will perform all parsing and validation, if no `ValidationError` is raised, you know the resulting model instance is valid. -```py + +```py group="basic-model" assert user.id == 123 -assert user_x.id == 123 -assert isinstance(user_x.id, int) # Note that 123.45 was casted to an int and its value is 123 +assert isinstance(user.id, int) +# Note that 123.45 was cast to an int and its value is 123 ``` + More details on the casting in the case of `user_x` can be found in [Data Conversion](#data-conversion). Fields of a model can be accessed as normal attributes of the user object. The string '123' has been cast to an int as per the field type -```py + +```py group="basic-model" assert user.name == 'Jane Doe' ``` + `name` wasn't set when user was initialised, so it has the default value -```py + +```py group="basic-model" assert user.__fields_set__ == {'id'} ``` + The fields which were supplied when user was initialised. -```py -assert user.model_dump() == dict(user) == {'id': 123, 'name': 'Jane Doe'} + +```py group="basic-model" +assert user.model_dump() == {'id': 123, 'name': 'Jane Doe'} ``` + Either `.model_dump()` or `dict(user)` will provide a dict of fields, but `.model_dump()` can take numerous other arguments. -```py + +```py group="basic-model" user.id = 321 assert user.id == 321 ``` + This model is mutable so field values can be changed. ### Model properties @@ -115,11 +128,37 @@ Models possess the following methods and attributes: More complex hierarchical data structures can be defined using models themselves as types in annotations. -{!.tmp_examples/models_recursive.md!} +```py +from typing import List, Optional + +from pydantic import BaseModel + + +class Foo(BaseModel): + count: int + size: Optional[float] = None + + +class Bar(BaseModel): + apple = 'x' + banana = 'y' + + +class Spam(BaseModel): + foo: Foo + bars: List[Bar] + + +m = Spam(foo={'count': 4}, bars=[{'apple': 'x1'}, {'apple': 'x2'}]) +print(m) +#> foo=Foo(count=4, size=None) bars=[Bar(), Bar()] +print(m.model_dump()) +#> {'foo': {'count': 4, 'size': None}, 'bars': [{}, {}]} +``` For self-referencing models, see [postponed annotations](postponed_annotations.md#self-referencing-models). -## ORM Mode (aka Arbitrary Class Instances) +## "From Attributes" (aka ORM Mode/Arbitrary Class Instances) Pydantic models can be created from arbitrary class instances to support models that map to ORM objects. @@ -130,14 +169,85 @@ To do this: The example here uses SQLAlchemy, but the same approach should work for any ORM. -{!.tmp_examples/models_orm_mode.md!} +```py +from typing import List + +from sqlalchemy import Column, Integer, String +from sqlalchemy.dialects.postgresql import ARRAY +from sqlalchemy.ext.declarative import declarative_base + +from pydantic import BaseModel, constr + +Base = declarative_base() + + +class CompanyOrm(Base): + __tablename__ = 'companies' + id = Column(Integer, primary_key=True, nullable=False) + public_key = Column(String(20), index=True, nullable=False, unique=True) + name = Column(String(63), unique=True) + domains = Column(ARRAY(String(255))) + + +class CompanyModel(BaseModel): + model_config = dict(from_attributes=True) + id: int + public_key: constr(max_length=20) + name: constr(max_length=63) + domains: List[constr(max_length=255)] + + +co_orm = CompanyOrm( + id=123, + public_key='foobar', + name='Testing', + domains=['example.com', 'foobar.com'], +) +print(co_orm) +#> <__main__.CompanyOrm object at 0x0123456789ab> +co_model = CompanyModel.model_validate(co_orm) +print(co_model) +#> id=123 public_key='foobar' name='Testing' domains=['example.com', 'foobar.com'] +``` ### Reserved names You may want to name a Column after a reserved SQLAlchemy field. In that case, Field aliases will be convenient: -{!.tmp_examples/models_orm_mode_reserved_name.md!} +```py +import typing + +import sqlalchemy as sa +from sqlalchemy.ext.declarative import declarative_base + +from pydantic import BaseModel, Field + + +class MyModel(BaseModel): + model_config = dict(from_attributes=True) + metadata: typing.Dict[str, str] = Field(alias='metadata_') + + +Base = declarative_base() + + +class SQLModel(Base): + __tablename__ = 'my_table' + id = sa.Column('id', sa.Integer, primary_key=True) + # 'metadata' is reserved by SQLAlchemy, hence the '_' + metadata_ = sa.Column('metadata', sa.JSON) + + +sql_model = SQLModel(metadata_={'key': 'val'}, id=1) + +pydantic_model = MyModel.model_validate(sql_model) + +print(pydantic_model.model_dump()) +#> {'metadata': {'key': 'val'}} +print(pydantic_model.model_dump(by_alias=True)) +#> {'metadata_': {'key': 'val'}} +``` !!! note The example above works because aliases have priority over field names for @@ -149,7 +259,47 @@ ORM instances will be parsed with `from_orm` recursively as well as at the top l Here a vanilla class is used to demonstrate the principle, but any ORM class could be used instead. -{!.tmp_examples/models_orm_mode_recursive.md!} +```py +from typing import List + +from pydantic import BaseModel + + +class PetCls: + def __init__(self, *, name: str, species: str): + self.name = name + self.species = species + + +class PersonCls: + def __init__(self, *, name: str, age: float = None, pets: List[PetCls]): + self.name = name + self.age = age + self.pets = pets + + +class Pet(BaseModel): + model_config = dict(from_attributes=True) + name: str + species: str + + +class Person(BaseModel): + model_config = dict(from_attributes=True) + name: str + age: float = None + pets: List[Pet] + + +bones = PetCls(name='Bones', species='dog') +orion = PetCls(name='Orion', species='cat') +anna = PersonCls(name='Anna', age=20, pets=[bones, orion]) +anna_model = Person.from_orm(anna) +print(anna_model) +""" +name='Anna' age=20.0 pets=[Pet(name='Bones', species='dog'), Pet(name='Orion', species='cat')] +""" +``` ### Data binding @@ -166,7 +316,49 @@ The `GetterDict` instance will be called for each field with a sentinel as a fal value is set). Returning this sentinel means that the field is missing. Any other value will be interpreted as the value of the field. -{!.tmp_examples/models_orm_mode_data_binding.md!} +```py test="xfail - GetterDict is removed, replace with a custom root_validator" +from typing import Any, Optional +from xml.etree.ElementTree import fromstring + +from pydantic import BaseModel +from pydantic.utils import GetterDict + +xmlstring = """ + + + + +""" + + +class UserGetter(GetterDict): + def get(self, key: str, default: Any) -> Any: + # element attributes + if key in {'Id', 'Status'}: + return self._obj.attrib.get(key, default) + + # element children + else: + try: + return self._obj.find(key).attrib['Value'] + except (AttributeError, KeyError): + return default + + +class User(BaseModel): + Id: int + Status: Optional[str] + FirstName: Optional[str] + LastName: Optional[str] + LoggedIn: bool + + class Config: + from_attributes = True + getter_dict = UserGetter + + +user = User.from_orm(fromstring(xmlstring)) +``` ## Error Handling @@ -210,7 +402,89 @@ Each error object contains: As a demonstration: -{!.tmp_examples/models_errors1.md!} +```py +from typing import List + +from pydantic import BaseModel, ValidationError, conint + + +class Location(BaseModel): + lat = 0.1 + lng = 10.1 + + +class Model(BaseModel): + is_required: float + gt_int: conint(gt=42) + list_of_ints: List[int] = None + a_float: float = None + recursive_model: Location = None + + +data = dict( + list_of_ints=['1', 2, 'bad'], + a_float='not a float', + recursive_model={'lat': 4.2, 'lng': 'New York'}, + gt_int=21, +) + +try: + Model(**data) +except ValidationError as e: + print(e) + """ + 4 validation errors for Location + is_required + Field required [type=missing, input_value={'list_of_ints': ['1', 2,...ew York'}, 'gt_int': 21}, input_type=dict] + gt_int + Input should be greater than 42 [type=greater_than, input_value=21, input_type=int] + list_of_ints -> 2 + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='bad', input_type=str] + a_float + Input should be a valid number, unable to parse string as an number [type=float_parsing, input_value='not a float', input_type=str] + """ + +try: + Model(**data) +except ValidationError as e: + # print(e.json()) + # TODO set back to .json() once we add it + print(e.errors()) + """ + [ + { + 'type': 'missing', + 'loc': ('is_required',), + 'msg': 'Field required', + 'input': { + 'list_of_ints': ['1', 2, 'bad'], + 'a_float': 'not a float', + 'recursive_model': {'lat': 4.2, 'lng': 'New York'}, + 'gt_int': 21, + }, + }, + { + 'type': 'greater_than', + 'loc': ('gt_int',), + 'msg': 'Input should be greater than 42', + 'input': 21, + 'ctx': {'gt': 42}, + }, + { + 'type': 'int_parsing', + 'loc': ('list_of_ints', 2), + 'msg': 'Input should be a valid integer, unable to parse string as an integer', + 'input': 'bad', + }, + { + 'type': 'float_parsing', + 'loc': ('a_float',), + 'msg': 'Input should be a valid number, unable to parse string as an number', + 'input': 'not a float', + }, + ] + """ +``` ### Custom Errors @@ -218,11 +492,76 @@ In your custom data types or validators you should use `ValueError`, `TypeError` See [validators](validators.md) for more details on use of the `@validator` decorator. -{!.tmp_examples/models_errors2.md!} +```py +from pydantic import BaseModel, ValidationError, field_validator + + +class Model(BaseModel): + foo: str + + @field_validator('foo') + def value_must_equal_bar(cls, v): + if v != 'bar': + raise ValueError('value must be "bar"') + + return v + + +try: + Model(foo='ber') +except ValidationError as e: + print(e.errors()) + """ + [ + { + 'type': 'value_error', + 'loc': ('foo',), + 'msg': 'Value error, value must be "bar"', + 'input': 'ber', + 'ctx': {'error': 'value must be "bar"'}, + } + ] + """ +``` You can also define your own error classes, which can specify a custom error code, message template, and context: -{!.tmp_examples/models_errors3.md!} +```py +from pydantic_core import PydanticCustomError + +from pydantic import BaseModel, ValidationError, field_validator + + +class Model(BaseModel): + foo: str + + @field_validator('foo', allow_reuse=True) # TODO remove after #4436 + def value_must_equal_bar(cls, v): + if v != 'bar': + raise PydanticCustomError( + 'not_a_bar', + 'value is not "bar", got "{wrong_value}"', + dict(wrong_value=v), + ) + return v + + +try: + Model(foo='ber') +except ValidationError as e: + print(e.errors()) + """ + [ + { + 'type': 'not_a_bar', + 'loc': ('foo',), + 'msg': 'value is not "bar", got "ber"', + 'input': 'ber', + 'ctx': {'wrong_value': 'ber'}, + } + ] + """ +``` ## Helper Functions @@ -230,12 +569,39 @@ You can also define your own error classes, which can specify a custom error cod * **`model_validate`**: this is very similar to the `__init__` method of the model, except it takes a dict rather than keyword arguments. If the object passed is not a dict a `ValidationError` will be raised. -* **`parse_raw`**: this takes a *str* or *bytes* and parses it as *json*, then passes the result to `model_validate`. +* **`model_validate_json`**: this takes a *str* or *bytes* and parses it as *json*, then passes the result to `model_validate`. Parsing *pickle* data is also supported by setting the `content_type` argument appropriately. -* **`parse_file`**: this takes in a file path, reads the file and passes the contents to `parse_raw`. If `content_type` is omitted, - it is inferred from the file's extension. -{!.tmp_examples/models_parse.md!} +```py +from datetime import datetime + +from pydantic import BaseModel, ValidationError + + +class User(BaseModel): + id: int + name = 'John Doe' + signup_ts: datetime = None + + +m = User.model_validate({'id': 123, 'name': 'James'}) +print(m) +#> id=123 signup_ts=None + +try: + User.model_validate(['not', 'a', 'dict']) +except ValidationError as e: + print(e) + """ + 1 validation error for User + Input should be a valid dictionary [type=dict_type, input_value=['not', 'a', 'dict'], input_type=list] + """ + +# assumes json as no content type passed +m = User.model_validate_json('{"id": 123, "name": "James"}') +print(m) +#> id=123 signup_ts=None +``` !!! warning To quote the [official `pickle` docs](https://docs.python.org/3/library/pickle.html), @@ -256,7 +622,42 @@ as efficiently as possible (`model_construct()` is generally around 30x faster t `model_construct()` does not do any validation, meaning it can create models which are invalid. **You should only ever use the `model_construct()` method with data which has already been validated, or you trust.** -{!.tmp_examples/models_construct.md!} +```py +from pydantic import BaseModel + + +class User(BaseModel): + id: int + age: int + name: str = 'John Doe' + + +original_user = User(id=123, age=32) + +user_data = original_user.model_dump() +print(user_data) +#> {'id': 123, 'age': 32, 'name': 'John Doe'} +fields_set = original_user.__fields_set__ +print(fields_set) +#> {'age', 'id'} + +# ... +# pass user_data and fields_set to RPC or save to the database etc. +# ... + +# you can then create a new instance of User without +# re-running validation which would be unnecessary at this point: +new_user = User.model_construct(_fields_set=fields_set, **user_data) +print(repr(new_user)) +#> User(id=123, age=32, name='John Doe') +print(new_user.__fields_set__) +#> {'age', 'id'} + +# construct can be dangerous, only use it with validated data!: +bad_user = User.model_construct(id='dog') +print(repr(bad_user)) +#> User(id='dog', name='John Doe') +``` The `_fields_set` keyword argument to `model_construct()` is optional, but allows you to be more precise about which fields were originally set and which weren't. If it's omitted `__fields_set__` will just be the keys @@ -279,7 +680,50 @@ In order to declare a generic model, you perform the following steps: Here is an example using `GenericModel` to create an easily-reused HTTP response payload wrapper: -{!.tmp_examples/models_generics.md!} +```py test="xfail - needs always/validate default support" +from typing import Generic, List, Optional, TypeVar + +from pydantic import BaseModel, ValidationError, validator_function + +DataT = TypeVar('DataT') + + +class Error(BaseModel): + code: int + message: str + + +class DataModel(BaseModel): + numbers: List[int] + people: List[str] + + +class Response(BaseModel, Generic[DataT]): + data: Optional[DataT] + error: Optional[Error] + + @validator_function('error', always=True) + def check_consistency(cls, v, values): + if v is not None and values['data'] is not None: + raise ValueError('must not provide both data and error') + if v is None and values.get('data') is None: + raise ValueError('must provide data or error') + return v + + +data = DataModel(numbers=[1, 2, 3], people=[]) +error = Error(code=404, message='Not found') + +print(Response[int](data=1)) +print(Response[str](data='value')) +print(Response[str](data='value').model_dump()) +print(Response[DataModel](data=data).model_dump()) +print(Response[DataModel](error=error).model_dump()) +try: + Response[int](data='value') +except ValidationError as e: + print(e) +``` If you set `Config` or make use of `validator` in your generic model definition, it is applied to concrete subclasses in the same way as when inheriting from `BaseModel`. Any methods defined on @@ -295,20 +739,114 @@ you would expect mypy to provide if you were to declare the type without using ` To inherit from a GenericModel without replacing the `TypeVar` instance, a class must also inherit from `typing.Generic`: -{!.tmp_examples/models_generics_inheritance.md!} +```py +from typing import Generic, TypeVar + +from pydantic import BaseModel + +TypeX = TypeVar('TypeX') + + +class BaseClass(BaseModel, Generic[TypeX]): + X: TypeX + + +class ChildClass(BaseClass[TypeX], Generic[TypeX]): + # Inherit from Generic[TypeX] + pass + + +# Replace TypeX by int +print(ChildClass[int](X=1)) +#> X=1 +``` You can also create a generic subclass of a `GenericModel` that partially or fully replaces the type parameters in the superclass. -{!.tmp_examples/models_generics_inheritance_extend.md!} +```py +from typing import Generic, TypeVar + +from pydantic import BaseModel + +TypeX = TypeVar('TypeX') +TypeY = TypeVar('TypeY') +TypeZ = TypeVar('TypeZ') + + +class BaseClass(BaseModel, Generic[TypeX, TypeY]): + x: TypeX + y: TypeY + + +class ChildClass(BaseClass[int, TypeY], Generic[TypeY, TypeZ]): + z: TypeZ + + +# Replace TypeY by str +print(ChildClass[str, int](x=1, y='y', z=3)) +#> x=1 y='y' z=3 +``` If the name of the concrete subclasses is important, you can also override the default behavior: -{!.tmp_examples/models_generics_naming.md!} +```py +from typing import Any, Generic, Tuple, Type, TypeVar + +from pydantic import BaseModel + +DataT = TypeVar('DataT') + + +class Response(BaseModel, Generic[DataT]): + data: DataT + + @classmethod + def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: + return f'{params[0].__name__.title()}Response' + + +print(repr(Response[int](data=1))) +#> Response[int](data=1) +print(repr(Response[str](data='a'))) +#> Response[str](data='a') +``` Using the same TypeVar in nested models allows you to enforce typing relationships at different points in your model: -{!.tmp_examples/models_generics_nested.md!} +```py +from typing import Generic, TypeVar + +from pydantic import BaseModel, ValidationError + +T = TypeVar('T') + + +class InnerT(BaseModel, Generic[T]): + inner: T + + +class OuterT(BaseModel, Generic[T]): + outer: T + nested: InnerT[T] + + +nested = InnerT[int](inner=1) +print(OuterT[int](outer=1, nested=nested)) +#> outer=1 nested=InnerT[int](inner=1) +try: + nested = InnerT[str](inner='a') + print(OuterT[int](outer='a', nested=nested)) +except ValidationError as e: + print(e) + """ + 2 validation errors for InnerT[int] + outer + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] + nested + Input should be a valid dictionary [type=dict_type, input_value=InnerT[str](inner='a'), input_type=InnerT[str]] + """ +``` Pydantic also treats `GenericModel` similarly to how it treats built-in generic types like `List` and `Dict` when it comes to leaving them unparameterized, or using bounded `TypeVar` instances: @@ -318,14 +856,59 @@ comes to leaving them unparameterized, or using bounded `TypeVar` instances: Also, like `List` and `Dict`, any parameters specified using a `TypeVar` can later be substituted with concrete types. -{!.tmp_examples/models_generics_typevars.md!} +```py +from typing import Generic, TypeVar + +from pydantic import BaseModel, ValidationError + +AT = TypeVar('AT') +BT = TypeVar('BT') + + +class Model(BaseModel, Generic[AT, BT]): + a: AT + b: BT + + +print(Model(a='a', b='a')) +#> a='a' b='a' + +IntT = TypeVar('IntT', bound=int) +typevar_model = Model[int, IntT] +print(typevar_model(a=1, b=1)) +#> a=1 b=1 +try: + typevar_model(a='a', b='a') +except ValidationError as exc: + print(exc) + """ + 2 validation errors for Model[int, ~IntT] + a + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] + b + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] + """ + +concrete_model = typevar_model[int] +print(concrete_model(a=1, b=1)) +#> a=1 b=1 +``` ## Dynamic model creation There are some occasions where the shape of a model is not known until runtime. For this *pydantic* provides the `create_model` method to allow models to be created on the fly. -{!.tmp_examples/models_dynamic_creation.md!} +```py +from pydantic import BaseModel, create_model + +DynamicFoobarModel = create_model('DynamicFoobarModel', foo=(str, ...), bar=123) + + +class StaticFoobarModel(BaseModel): + foo: str + bar: int = 123 +``` Here `StaticFoobarModel` and `DynamicFoobarModel` are identical. @@ -338,11 +921,50 @@ Fields are defined by either a tuple of the form `(, )` or special key word arguments `__config__` and `__base__` can be used to customise the new model. This includes extending a base model with extra fields. -{!.tmp_examples/models_dynamic_inheritance.md!} +```py +from pydantic import BaseModel, create_model + + +class FooModel(BaseModel): + foo: str + bar: int = 123 + + +BarModel = create_model( + 'BarModel', + apple='russet', + banana='yellow', + __base__=FooModel, +) +print(BarModel) +#> +print(BarModel.model_fields.keys()) +#> dict_keys(['foo', 'bar']) +``` You can also add validators by passing a dict to the `__validators__` argument. -{!.tmp_examples/models_dynamic_validators.md!} +```py test="xfail create_model validators" +from pydantic import ValidationError, create_model, validator + + +def username_alphanumeric(cls, v): + assert v.isalnum(), 'must be alphanumeric' + return v + + +validators = {'username_validator': validator('username')(username_alphanumeric)} + +UserModel = create_model('UserModel', username=(str, ...), __validators__=validators) + +user = UserModel(username='scolvin') +print(user) + +try: + UserModel(username='scolvi%n') +except ValidationError as e: + print(e) +``` ## Model creation from `NamedTuple` or `TypedDict` @@ -352,7 +974,29 @@ For this _pydantic_ provides `create_model_from_namedtuple` and `create_model_fr Those methods have the exact same keyword arguments as `create_model`. -{!.tmp_examples/models_from_typeddict.md!} +```py test="xfail need Validator to replace create_model_from_typeddict" +from typing_extensions import TypedDict + +from pydantic import ValidationError, create_model_from_typeddict + + +class User(TypedDict): + name: str + id: int + + +class Config: + extra = 'forbid' + + +UserM = create_model_from_typeddict(User, __config__=Config) +print(repr(UserM(name=123, id='3'))) + +try: + UserM(name=123, id='3', other='no') +except ValidationError as e: + print(e) +``` ## Custom Root Types @@ -362,7 +1006,25 @@ The root type can be any type supported by pydantic, and is specified by the typ The root value can be passed to the model `__init__` via the `__root__` keyword argument, or as the first and only argument to `model_validate`. -{!.tmp_examples/models_custom_root_field.md!} +```py test="xfail support/replace __root__" +import json +from typing import List + +from pydantic import BaseModel +from pydantic.json_schema import schema + + +class Pets(BaseModel): + __root__: List[str] + + +print(Pets(__root__=['dog', 'cat'])) +print(Pets(__root__=['dog', 'cat']).model_dump_json()) +print(Pets.model_validate(['dog', 'cat'])) +print(Pets.model_json_schema()) +pets_schema = schema([Pets]) +print(json.dumps(pets_schema, indent=2)) +``` If you call the `model_validate` method for a model with a custom root type with a *dict* as the first argument, the following logic is used: @@ -375,7 +1037,30 @@ the following logic is used: This is demonstrated in the following example: -{!.tmp_examples/models_custom_root_field_parse_obj.md!} +```py test="xfail support/replace __root__" +from typing import Dict, List + +from pydantic import BaseModel, ValidationError + + +class Pets(BaseModel): + __root__: List[str] + + +print(Pets.model_validate(['dog', 'cat'])) +print(Pets.model_validate({'__root__': ['dog', 'cat']})) # not recommended + + +class PetsByName(BaseModel): + __root__: Dict[str, str] + + +print(PetsByName.model_validate({'Otis': 'dog', 'Milo': 'cat'})) +try: + PetsByName.model_validate({'__root__': {'Otis': 'dog', 'Milo': 'cat'}}) +except ValidationError as e: + print(e) +``` !!! warning Calling the `model_validate` method on a dict with the single key `"__root__"` for non-mapping custom root types @@ -383,7 +1068,26 @@ This is demonstrated in the following example: If you want to access items in the `__root__` field directly or to iterate over the items, you can implement custom `__iter__` and `__getitem__` functions, as shown in the following example. -{!.tmp_examples/models_custom_root_access.md!} +```py test="xfail support/replace __root__" +from typing import List + +from pydantic import BaseModel + + +class Pets(BaseModel): + __root__: List[str] + + def __iter__(self): + return iter(self.__root__) + + def __getitem__(self, item): + return self.__root__[item] + + +pets = Pets.model_validate(['dog', 'cat']) +print(pets[0]) +print([pet for pet in pets]) +``` ## Faux Immutability @@ -394,7 +1098,32 @@ values of instance attributes will raise errors. See [model config](model_config Immutability in Python is never strict. If developers are determined/stupid they can always modify a so-called "immutable" object. -{!.tmp_examples/models_mutation.md!} +```py +from pydantic import BaseModel + + +class FooBarModel(BaseModel): + model_config = dict(frozen=True) + a: str + b: dict + + +foobar = FooBarModel(a='hello', b={'apple': 'pear'}) + +try: + foobar.a = 'different' +except TypeError as e: + print(e) + #> "FooBarModel" is frozen and does not support item assignment + +print(foobar.a) +#> hello +print(foobar.b) +#> {'apple': 'pear'} +foobar.b['apple'] = 'grape' +print(foobar.b) +#> {'apple': 'grape'} +``` Trying to change `a` caused an error, and `a` remains unchanged. However, the dict `b` is mutable, and the immutability of `foobar` doesn't stop `b` from being changed. @@ -404,7 +1133,20 @@ immutability of `foobar` doesn't stop `b` from being changed. Pydantic models can be used alongside Python's [Abstract Base Classes](https://docs.python.org/3/library/abc.html) (ABCs). -{!.tmp_examples/models_abc.md!} +```py +import abc + +from pydantic import BaseModel + + +class FooBarModel(BaseModel, abc.ABC): + a: str + b: int + + @abc.abstractmethod + def my_abstract_method(self): + pass +``` ## Field Ordering @@ -419,7 +1161,31 @@ Field order is important in models for the following reasons: As of **v1.0** all fields with annotations (whether annotation-only or with a default value) will precede all fields without an annotation. Within their respective groups, fields remain in the order they were defined. -{!.tmp_examples/models_field_order.md!} +```py +from pydantic import BaseModel, ValidationError + + +class Model(BaseModel): + a: int + b = 2 + c: int = 1 + d = 0 + e: float + + +print(Model.model_fields.keys()) +#> dict_keys(['a', 'c', 'e']) +m = Model(e=2, a=1) +print(m.model_dump()) +#> {'a': 1, 'c': 1, 'e': 2.0} +try: + Model(a='x', b='x', c='x', d='x', e='x') +except ValidationError as err: + error_locations = [e['loc'] for e in err.errors()] + +print(error_locations) +#> [('a',), ('c',), ('e',)] +``` !!! warning As demonstrated by the example above, combining the use of annotated and non-annotated fields @@ -433,7 +1199,15 @@ all fields without an annotation. Within their respective groups, fields remain To declare a field as required, you may declare it using just an annotation, or you may use an ellipsis (`...`) as the value: -{!.tmp_examples/models_required_fields.md!} +```py +from pydantic import BaseModel, Field + + +class Model(BaseModel): + a: int + b: int = ... + c: int = Field(...) +``` Where `Field` refers to the [field function](schema.md#field-customization). @@ -451,7 +1225,29 @@ with [mypy](mypy.md), and as of **v1.0** should be avoided in most cases. If you want to specify a field that can take a `None` value while still being required, you can use `Optional` with `...`: -{!.tmp_examples/models_required_field_optional.md!} +```py +from typing import Optional + +from pydantic import BaseModel, ValidationError + + +class Model(BaseModel): + a: Optional[int] + b: Optional[int] = None + + +print(Model(a=1)) +#> a=1 b=None +try: + Model(b=2) +except ValidationError as e: + print(e) + """ + 1 validation error for Model + a + Field required [type=missing, input_value={'b': 2}, input_type=dict] + """ +``` In this model, `a`, `b`, and `c` can take `None` as a value. But `a` is optional, while `b` and `c` are required. `b` and `c` require a value, even if the value is `None`. @@ -469,7 +1265,23 @@ To do this, you may want to use a `default_factory`. Example of usage: -{!.tmp_examples/models_default_factory.md!} +```py +from datetime import datetime +from uuid import UUID, uuid4 + +from pydantic import BaseModel, Field + + +class Model(BaseModel): + uid: UUID = Field(default_factory=uuid4) + updated: datetime = Field(default_factory=datetime.utcnow) + + +m1 = Model() +m2 = Model() +assert m1.uid != m2.uid +assert m1.updated != m2.updated +``` Where `Field` refers to the [field function](schema.md#field-customization). @@ -486,13 +1298,52 @@ automatically excluded from the model. If you need to vary or manipulate internal attributes on instances of the model, you can declare them using `PrivateAttr`: -{!.tmp_examples/private_attributes.md!} +```py +from datetime import datetime +from random import randint + +from pydantic import BaseModel, PrivateAttr + + +class TimeAwareModel(BaseModel): + _processed_at: datetime = PrivateAttr(default_factory=datetime.now) + _secret_value: str = PrivateAttr() + + def __init__(self, **data): + super().__init__(**data) + # this could also be done with default_factory + self._secret_value = randint(1, 5) + + +m = TimeAwareModel() +print(m._processed_at) +#> 2032-01-02 03:04:05.000006 +print(m._secret_value) +#> 3 +``` Private attribute names must start with underscore to prevent conflicts with model fields: both `_attr` and `__attr__` are supported. If `Config.underscore_attrs_are_private` is `True`, any non-ClassVar underscore attribute will be treated as private: -{!.tmp_examples/private_attributes_underscore_attrs_are_private.md!} +```py test="xfail what the hell is underscore_attrs_are_private?" +from typing import ClassVar + +from pydantic import BaseModel + + +class Model(BaseModel): + _class_var: ClassVar[str] = 'class var value' + _private_attr: str = 'private attr value' + + class Config: + underscore_attrs_are_private = True + + +print(Model._class_var) +print(Model._private_attr) +print(Model()._private_attr) +``` Upon class creation pydantic constructs `__slots__` filled with private attributes. @@ -505,7 +1356,25 @@ logic used to populate pydantic models in a more ad-hoc way. This function behav This is especially useful when you want to parse results into a type that is not a direct subclass of `BaseModel`. For example: -{!.tmp_examples/parse_obj_as.md!} +```py +from typing import List + +from pydantic import BaseModel, parse_obj_as + + +class Item(BaseModel): + id: int + name: str + + +# `item_data` could come from an API call, eg., via something like: +# item_data = requests.get('https://my-api.com/items').json() +item_data = [{'id': 1, 'name': 'My Item'}] + +items = parse_obj_as(List[Item], item_data) +print(items) +#> [Item(id=1, name='My Item')] +``` This function is capable of parsing data into any of the types pydantic can handle as fields of a `BaseModel`. @@ -518,7 +1387,18 @@ which are analogous to `BaseModel.parse_file` and `BaseModel.parse_raw`. and in some cases this may result in a loss of information. For example: -{!.tmp_examples/models_data_conversion.md!} +```py test="xfail this logic has failed" +from pydantic import BaseModel + + +class Model(BaseModel): + a: int + b: float + c: str + + +print(Model(a=3.1415, b=' 2.72 ', c=123).model_dump()) +``` This is a deliberate decision of *pydantic*, and in general it's the most useful approach. See [here](https://github.com/pydantic/pydantic/issues/578) for a longer discussion on the subject. @@ -529,13 +1409,45 @@ Nevertheless, [strict type checking](types.md#strict-types) is partially support All *pydantic* models will have their signature generated based on their fields: -{!.tmp_examples/models_signature.md!} +```py +import inspect + +from pydantic import BaseModel, Field + + +class FooModel(BaseModel): + id: int + name: str = None + description: str = 'Foo' + apple: int = Field(..., alias='pear') + + +print(inspect.signature(FooModel)) +#> (*, id: int, name: str = None, description: str = 'Foo', pear: int) -> None +``` An accurate signature is useful for introspection purposes and libraries like `FastAPI` or `hypothesis`. The generated signature will also respect custom `__init__` functions: -{!.tmp_examples/models_signature_custom_init.md!} +```py +import inspect + +from pydantic import BaseModel + + +class MyModel(BaseModel): + id: int + info: str = 'Foo' + + def __init__(self, id: int = 1, *, bar: str, **data) -> None: + """My custom init!""" + super().__init__(id=id, bar=bar, **data) + + +print(inspect.signature(MyModel)) +#> (id: int = 1, *, bar: str, info: str = 'Foo') -> None +``` To be included in the signature, a field's alias or name must be a valid Python identifier. *pydantic* prefers aliases over names, but may use field names if the alias is not a valid Python identifier. @@ -552,7 +1464,26 @@ In addition, the `**data` argument will always be present in the signature if `C *pydantic* supports structural pattern matching for models, as introduced by [PEP 636](https://peps.python.org/pep-0636/) in Python 3.10. -{!.tmp_examples/models_structural_pattern_matching.md!} +```py requires="3.10" lint="skip" +from pydantic import BaseModel + + +class Pet(BaseModel): + name: str + species: str + + +a = Pet(name='Bones', species='dog') + +match a: + # match `species` to 'dog', declare and initialize `dog_name` + case Pet(species='dog', name=dog_name): + print(f'{dog_name} is a dog') +#> Bones is a dog + # default case + case _: + print('No dog matched') +``` !!! note A match-case statement may seem as if it creates a new model, but don't be fooled; diff --git a/docs/usage/mypy.md b/docs/usage/mypy.md index 4d047c57bb..ab8264e8eb 100644 --- a/docs/usage/mypy.md +++ b/docs/usage/mypy.md @@ -1,7 +1,25 @@ *pydantic* models work with [mypy](http://mypy-lang.org/) provided you use the annotation-only version of required fields: -{!.tmp_examples/mypy_main.md!} +```py test="skip" +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, NoneStr + + +class Model(BaseModel): + age: int + first_name = 'John' + last_name: NoneStr = None + signup_ts: Optional[datetime] = None + list_of_ints: List[int] + + +m = Model(age=42, list_of_ints=[1, '2', b'3']) +print(m.middle_name) # not a model field! +Model() # will raise a validation error for age and list_of_ints +``` You can run your code through mypy with: diff --git a/docs/usage/postponed_annotations.md b/docs/usage/postponed_annotations.md index 8fbf805f0c..d6e7458ebe 100644 --- a/docs/usage/postponed_annotations.md +++ b/docs/usage/postponed_annotations.md @@ -4,7 +4,22 @@ Postponed annotations (as described in [PEP563](https://www.python.org/dev/peps/pep-0563/)) "just work". -{!.tmp_examples/postponed_annotations_main.md!} +```py requires="3.9" upgrade="skip" +from __future__ import annotations + +from typing import Any + +from pydantic import BaseModel + + +class Model(BaseModel): + a: list[int] + b: Any + + +print(Model(a=('1', 2, 3), b='ok')) +#> a=[1, 2, 3] b='ok' +``` Internally, *pydantic* will call a method similar to `typing.get_type_hints` to resolve annotations. @@ -16,7 +31,24 @@ In some cases, a `ForwardRef` won't be able to be resolved during model creation For example, this happens whenever a model references itself as a field type. When this happens, you'll need to call `update_forward_refs` after the model has been created before it can be used: -{!.tmp_examples/postponed_annotations_forward_ref.md!} +```py +from typing import ForwardRef + +from pydantic import BaseModel + +Foo = ForwardRef('Foo') + + +class Foo(BaseModel): + a: int = 123 + b: Foo = None + + +print(Foo()) +#> a=123 b=None +print(Foo(b={'a': '321'})) +#> a=123 b=Foo(a=321, b=None) +``` !!! warning To resolve strings (type names) into annotations (types), *pydantic* needs a namespace dict in which to @@ -25,11 +57,53 @@ When this happens, you'll need to call `update_forward_refs` after the model has For example, this works fine: -{!.tmp_examples/postponed_annotations_works.md!} +```py test="xfail - this should work" +from __future__ import annotations + +from pydantic import ( + BaseModel, + HttpUrl, # HttpUrl is defined in the module's global scope +) + + +def this_works(): + class Model(BaseModel): + a: HttpUrl + + print(Model(a='https://example.com')) + + +this_works() +``` While this will break: -{!.tmp_examples/postponed_annotations_broken.md!} +```py +from __future__ import annotations + +from pydantic import BaseModel +from pydantic.errors import PydanticUserError + + +def this_is_broken(): + from pydantic import HttpUrl # HttpUrl is defined in function local scope + + class Model(BaseModel): + a: HttpUrl + + try: + Model(a='https://example.com') + except PydanticUserError as e: + print(e) + + try: + Model.model_rebuild() + except NameError as e: + print(e) + + +this_is_broken() +``` Resolving this is beyond the call for *pydantic*: either remove the future import or declare the types globally. @@ -40,10 +114,40 @@ resolved after model creation. Within the model, you can refer to the not-yet-constructed model using a string: -{!.tmp_examples/postponed_annotations_self_referencing_string.md!} +```py +from pydantic import BaseModel + + +class Foo(BaseModel): + a: int = 123 + #: The sibling of `Foo` is referenced by string + sibling: 'Foo' = None + + +print(Foo()) +#> a=123 sibling=None +print(Foo(sibling={'a': '321'})) +#> a=123 sibling=Foo(a=321, sibling=None) +``` Since Python 3.7, you can also refer it by its type, provided you import `annotations` (see [above](postponed_annotations.md) for support depending on Python and *pydantic* versions). -{!.tmp_examples/postponed_annotations_self_referencing_annotations.md!} +```py +from __future__ import annotations + +from pydantic import BaseModel + + +class Foo(BaseModel): + a: int = 123 + #: The sibling of `Foo` is referenced directly by type + sibling: Foo = None + + +print(Foo()) +#> a=123 sibling=None +print(Foo(sibling={'a': '321'})) +#> a=123 sibling=Foo(a=321, sibling=None) +``` diff --git a/docs/usage/schema.md b/docs/usage/schema.md index 59966f5dec..cc0b26f9ef 100644 --- a/docs/usage/schema.md +++ b/docs/usage/schema.md @@ -1,6 +1,106 @@ *Pydantic* allows auto creation of JSON Schemas from models: -{!.tmp_examples/schema_main.md!} +```py output="json" +from enum import Enum + +from pydantic import BaseModel, Field + + +class FooBar(BaseModel): + count: int + size: float = None + + +class Gender(str, Enum): + male = 'male' + female = 'female' + other = 'other' + not_given = 'not_given' + + +class MainModel(BaseModel): + """ + This is the description of the main model + """ + + model_config = dict(title='Main') + + foo_bar: FooBar = Field(...) + gender: Gender = Field(None, alias='Gender') + snap: int = Field( + 42, + title='The Snap', + description='this is the value of snap', + gt=30, + lt=50, + ) + + +# this is equivalent to json.dumps(MainModel.model_json_schema(), indent=2): +print(MainModel.schema_json(indent=2)) +""" +{ + "type": "object", + "properties": { + "foo_bar": { + "$ref": "#/$defs/FooBar" + }, + "Gender": { + "allOf": [ + { + "$ref": "#/$defs/Gender" + } + ], + "default": null + }, + "snap": { + "type": "integer", + "exclusiveMaximum": 50, + "exclusiveMinimum": 30, + "default": 42, + "title": "The Snap", + "description": "this is the value of snap" + } + }, + "required": [ + "foo_bar" + ], + "title": "Main", + "description": "This is the description of the main model", + "$defs": { + "FooBar": { + "type": "object", + "properties": { + "count": { + "type": "integer", + "title": "Count" + }, + "size": { + "type": "number", + "default": null, + "title": "Size" + } + }, + "required": [ + "count" + ], + "title": "FooBar" + }, + "Gender": { + "enum": [ + "male", + "female", + "other", + "not_given" + ], + "title": "Gender", + "description": "An enumeration.", + "type": "string" + } + } +} +""" +``` The generated schemas are compliant with the specifications: @@ -36,7 +136,86 @@ apply the schema generation logic used for _pydantic_ models in a more ad-hoc wa These functions behave similarly to `BaseModel.model_json_schema` and `BaseModel.schema_json`, but work with arbitrary pydantic-compatible types. -{!.tmp_examples/schema_ad_hoc.md!} +```py requires="3.8" +from typing import Literal, Union + +from typing_extensions import Annotated + +from pydantic import BaseModel, Field, schema_json_of + + +class Cat(BaseModel): + pet_type: Literal['cat'] + cat_name: str + + +class Dog(BaseModel): + pet_type: Literal['dog'] + dog_name: str + + +Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] + +print(schema_json_of(Pet, title='The Pet Schema', indent=2)) +""" +{ + "oneOf": [ + { + "$ref": "#/$defs/Cat" + }, + { + "$ref": "#/$defs/Dog" + } + ], + "discriminator": { + "propertyName": "pet_type", + "mapping": { + "cat": "#/$defs/Cat", + "dog": "#/$defs/Dog" + } + }, + "$defs": { + "Cat": { + "type": "object", + "properties": { + "pet_type": { + "const": "cat", + "title": "Pet Type" + }, + "cat_name": { + "type": "string", + "title": "Cat Name" + } + }, + "required": [ + "pet_type", + "cat_name" + ], + "title": "Cat" + }, + "Dog": { + "type": "object", + "properties": { + "pet_type": { + "const": "dog", + "title": "Pet Type" + }, + "dog_name": { + "type": "string", + "title": "Dog Name" + } + }, + "required": [ + "pet_type", + "dog_name" + ], + "title": "Dog" + } + }, + "title": "The Pet Schema" +} +""" +``` ## Field customization @@ -114,13 +293,62 @@ If *pydantic* finds constraints which are not being enforced, an error will be r constraint to appear in the schema, even though it's not being checked upon parsing, you can use variadic arguments to `Field()` with the raw schema attribute name: -{!.tmp_examples/schema_unenforced_constraints.md!} +```py +from pydantic import BaseModel, Field, PositiveInt + +try: + # this won't work since PositiveInt takes precedence over the + # constraints defined in Field meaning they're ignored + class Model(BaseModel): + foo: PositiveInt = Field(..., lt=10) + +except ValueError as e: + print(e) + + +# if you find yourself needing this, an alternative is to declare +# the constraints in Field (or you could use conint()) +# here both constraints will be enforced: +class ModelB(BaseModel): + # Here both constraints will be applied and the schema + # will be generated correctly + foo: int = Field(..., gt=0, lt=10) + + +print(ModelB.model_json_schema()) +""" +{ + 'type': 'object', + 'properties': { + 'foo': { + 'type': 'integer', + 'exclusiveMaximum': 10, + 'exclusiveMinimum': 0, + 'title': 'Foo', + } + }, + 'required': ['foo'], + 'title': 'ModelB', +} +""" +``` ### typing.Annotated Fields Rather than assigning a `Field` value, it can be specified in the type hint with `typing.Annotated`: -{!.tmp_examples/schema_annotated.md!} +```py +from uuid import uuid4 + +from typing_extensions import Annotated + +from pydantic import BaseModel, Field + + +class Foo(BaseModel): + id: Annotated[str, Field(default_factory=lambda: uuid4().hex)] + name: Annotated[str, Field(max_length=256)] = 'Bar' +``` `Field` can only be supplied once per field - an error will be raised if used in `Annotated` and as the assigned value. Defaults can be set outside `Annotated` as the assigned value or with `Field.default_factory` inside `Annotated` - the @@ -137,7 +365,40 @@ see [Custom Data Types](types.md#custom-data-types) for more details. *pydantic* will inspect the signature of `__modify_schema__` to determine whether the `field` argument should be included. -{!.tmp_examples/schema_with_field.md!} +```py output="json" test="xfail needs work" lint="skip" +from typing import Any, Callable, Dict, Generator, Optional + +from pydantic import BaseModel, Field +from pydantic_core.core_schema import ValidationInfo + + +class RestrictedAlphabetStr(str): + @classmethod + def __get_validators__(cls) -> Generator[Callable, None, None]: + yield cls.validate + + @classmethod + def validate(cls, value: str, info: ValidationInfo): + alphabet = field.field_info.extra['alphabet'] + if any(c not in alphabet for c in value): + raise ValueError(f'{value!r} is not restricted to {alphabet!r}') + return cls(value) + + @classmethod + def __pydantic_modify_json_schema__( + cls, field_schema: Dict[str, Any], field: Optional[ModelField] + ): + if field: + alphabet = field.field_info.extra['alphabet'] + field_schema['examples'] = [c * 3 for c in alphabet] + + +class MyModel(BaseModel): + value: RestrictedAlphabetStr = Field(alphabet='ABC') + + +print(MyModel.schema_json(indent=2)) +``` ## JSON Schema Types @@ -152,14 +413,78 @@ following priority order (when there is an equivalent available): The field schema mapping from Python / *pydantic* to JSON Schema is done as follows: -{!.tmp_schema_mappings.html!} +{{ schema_mappings_table }} ## Top-level schema generation You can also generate a top-level JSON Schema that only includes a list of models and related sub-models in its `definitions`: -{!.tmp_examples/schema_top_level.md!} +```py output="json" +import json + +from pydantic import BaseModel +from pydantic.json_schema import schema + + +class Foo(BaseModel): + a: str = None + + +class Model(BaseModel): + b: Foo + + +class Bar(BaseModel): + c: int + + +top_level_schema = schema([Model, Bar], title='My Schema') +print(json.dumps(top_level_schema, indent=2)) +""" +{ + "$defs": { + "Foo": { + "type": "object", + "properties": { + "a": { + "type": "string", + "default": null, + "title": "A" + } + }, + "title": "Foo" + }, + "Model": { + "type": "object", + "properties": { + "b": { + "$ref": "#/$defs/Foo" + } + }, + "required": [ + "b" + ], + "title": "Model" + }, + "Bar": { + "type": "object", + "properties": { + "c": { + "type": "integer", + "title": "C" + } + }, + "required": [ + "c" + ], + "title": "Bar" + } + }, + "title": "My Schema" +} +""" +``` ## Schema customization @@ -169,7 +494,25 @@ You can customize the generated `$ref` JSON location: the definitions are always This is useful if you need to extend or modify the JSON Schema default definitions location. E.g. with OpenAPI: -{!.tmp_examples/schema_custom.md!} +```py output="json" test="xfail - what happened to ref_prefix?" +import json + +from pydantic import BaseModel +from pydantic.json_schema import schema + + +class Foo(BaseModel): + a: int + + +class Model(BaseModel): + a: Foo + + +# Default location for OpenAPI +top_level_schema = schema([Model], ref_prefix='#/components/schemas/') +print(json.dumps(top_level_schema, indent=2)) +``` It's also possible to extend/override the generated JSON schema in a model. @@ -178,7 +521,29 @@ To do it, use the `Config` sub-class attribute `schema_extra`. For example, you could add `examples` to the JSON Schema: -{!.tmp_examples/schema_with_example.md!} +```py output="json" test="xfail - replace schema_extra" +from pydantic import BaseModel + + +class Person(BaseModel): + name: str + age: int + + class Config: + # TODO: This is no longer valid in v2; + # update example to use __pydantic_modify_json_schema__ + schema_extra = { + 'examples': [ + { + 'name': 'John Doe', + 'age': 25, + } + ] + } + + +print(Person.schema_json(indent=2)) +``` For more fine-grained control, you can alternatively set `schema_extra` to a callable and post-process the generated schema. @@ -189,4 +554,24 @@ The callable is expected to mutate the schema dictionary *in-place*; the return For example, the `title` key can be removed from the model's `properties`: -{!.tmp_examples/schema_extra_callable.md!} +```py output="json" test="xfail - replace schema_extra" +from typing import Any, Dict, Type + +from pydantic import BaseModel + + +class Person(BaseModel): + name: str + age: int + + class Config: + # TODO: This is no longer valid in v2; + # update example to use __pydantic_modify_json_schema__ + @staticmethod + def schema_extra(schema: Dict[str, Any], model: Type['Person']) -> None: + for prop in schema.get('properties', {}).values(): + prop.pop('title', None) + + +print(Person.schema_json(indent=2)) +``` diff --git a/docs/usage/types.md b/docs/usage/types.md index 2325ec73e5..6be3e4d157 100644 --- a/docs/usage/types.md +++ b/docs/usage/types.md @@ -187,7 +187,59 @@ with custom properties and validation. *pydantic* uses standard library `typing` types as defined in PEP 484 to define complex objects. -{!.tmp_examples/types_iterables.md!} +```py +from typing import Deque, Dict, FrozenSet, List, Optional, Sequence, Set, Tuple, Union + +from pydantic import BaseModel + + +class Model(BaseModel): + simple_list: list = None + list_of_ints: List[int] = None + + simple_tuple: tuple = None + tuple_of_different_types: Tuple[int, float, str, bool] = None + + simple_dict: dict = None + dict_str_float: Dict[str, float] = None + + simple_set: set = None + set_bytes: Set[bytes] = None + frozen_set: FrozenSet[int] = None + + str_or_bytes: Union[str, bytes] = None + none_or_str: Optional[str] = None + + sequence_of_ints: Sequence[int] = None + + compound: Dict[Union[str, bytes], List[Set[int]]] = None + + deque: Deque[int] = None + + +print(Model(simple_list=['1', '2', '3']).simple_list) +#> ['1', '2', '3'] +print(Model(list_of_ints=['1', '2', '3']).list_of_ints) +#> [1, 2, 3] + +print(Model(simple_dict={'a': 1, b'b': 2}).simple_dict) +#> {'a': 1, b'b': 2} +print(Model(dict_str_float={'a': 1, b'b': 2}).dict_str_float) +#> {'a': 1.0, 'b': 2.0} + +print(Model(simple_tuple=[1, 2, 3, 4]).simple_tuple) +#> (1, 2, 3, 4) +print(Model(tuple_of_different_types=[4, 3, '2', 1]).tuple_of_different_types) +#> (4, 3.0, '2', True) + +print(Model(sequence_of_ints=[1, 2, 3, 4]).sequence_of_ints) +#> [1, 2, 3, 4] +print(Model(sequence_of_ints=(1, 2, 3, 4)).sequence_of_ints) +#> (1, 2, 3, 4) + +print(Model(deque=[1, 2, 3]).deque) +#> deque([1, 2, 3]) +``` ### Infinite Generators @@ -198,7 +250,45 @@ validated with the sub-type of `Sequence` (e.g. `int` in `Sequence[int]`). But if you have a generator that you don't want to be consumed, e.g. an infinite generator or a remote data loader, you can define its type with `Iterable`: -{!.tmp_examples/types_infinite_generator.md!} +```py +from typing import Iterable + +from pydantic import BaseModel + + +class Model(BaseModel): + infinite: Iterable[int] + + +def infinite_ints(): + i = 0 + while True: + yield i + i += 1 + + +m = Model(infinite=infinite_ints()) +print(m) +""" +infinite=ValidatorIterator(index=0, schema=Some(Int(IntValidator { strict: false }))) +""" + +for i in m.infinite: + print(i) + #> 0 + #> 1 + #> 2 + #> 3 + #> 4 + #> 5 + #> 6 + #> 7 + #> 8 + #> 9 + #> 10 + if i == 10: + break +``` !!! warning `Iterable` fields only perform a simple check that the argument is iterable and @@ -219,7 +309,54 @@ generator or a remote data loader, you can define its type with `Iterable`: You can create a [validator](validators.md) to validate the first value in an infinite generator and still not consume it entirely. -{!.tmp_examples/types_infinite_generator_validate_first.md!} +```py test="xfail - what's going on here?" +import itertools +from typing import Iterable + +from pydantic import BaseModel, ValidationError, field_validator + + +class Model(BaseModel): + infinite: Iterable[int] + + @field_validator('infinite') + # You don't need to add the "ModelField", but it will help your + # editor give you completion and catch errors + def infinite_first_int(cls, iterable, field): + first_value = next(iterable) + if field.sub_fields: + # The Iterable had a parameter type, in this case it's int + # We use it to validate the first value + sub_field = field.sub_fields[0] + v, error = sub_field.validate(first_value, {}, loc='first_value') + if error: + raise ValidationError([error], cls) + # This creates a new generator that returns the first value and then + # the rest of the values from the (already started) iterable + return itertools.chain([first_value], iterable) + + +def infinite_ints(): + i = 0 + while True: + yield i + i += 1 + + +m = Model(infinite=infinite_ints()) +print(m) + + +def infinite_strs(): + while True: + yield from 'allthesingleladies' + + +try: + Model(infinite=infinite_strs()) +except ValidationError as e: + print(e) +``` ### Unions @@ -229,7 +366,37 @@ The `Union` type allows a model attribute to accept different types, e.g.: You may get unexpected coercion with `Union`; see below.
Know that you can also make the check slower but stricter by using [Smart Union](model_config.md#smart-union) -{!.tmp_examples/types_union_incorrect.md!} +```py +from typing import Union +from uuid import UUID + +from pydantic import BaseModel + + +class User(BaseModel): + id: Union[int, str, UUID] + name: str + + +user_01 = User(id=123, name='John Doe') +print(user_01) +#> id=123 name='John Doe' +print(user_01.id) +#> 123 +user_02 = User(id='1234', name='John Doe') +print(user_02) +#> id='1234' name='John Doe' +print(user_02.id) +#> 1234 +user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') +user_03 = User(id=user_03_uuid, name='John Doe') +print(user_03) +#> id=UUID('cf57432e-809e-4353-adbd-9d5c0d733868') name='John Doe' +print(user_03.id) +#> cf57432e-809e-4353-adbd-9d5c0d733868 +print(user_03_uuid.int) +#> 275603287559914445491632874575877060712 +``` However, as can be seen above, *pydantic* will attempt to 'match' any of the types defined under `Union` and will use the first one that matches. In the above example the `id` of `user_03` was defined as a `uuid.UUID` class (which @@ -250,7 +417,27 @@ followed by less specific types. In the above example, the `UUID` class should precede the `int` and `str` classes to preclude the unexpected representation as such: -{!.tmp_examples/types_union_correct.md!} +```py +from typing import Union +from uuid import UUID + +from pydantic import BaseModel + + +class User(BaseModel): + id: Union[UUID, int, str] + name: str + + +user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') +user_03 = User(id=user_03_uuid, name='John Doe') +print(user_03) +#> id=UUID('cf57432e-809e-4353-adbd-9d5c0d733868') name='John Doe' +print(user_03.id) +#> cf57432e-809e-4353-adbd-9d5c0d733868 +print(user_03_uuid.int) +#> 275603287559914445491632874575877060712 +``` !!! tip The type `Optional[x]` is a shorthand for `Union[x, None]`. @@ -273,7 +460,44 @@ Setting a discriminated union has many benefits: - only one explicit error is raised in case of failure - the generated JSON schema implements the [associated OpenAPI specification](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminatorObject) -{!.tmp_examples/types_union_discriminated.md!} +```py requires="3.8" +from typing import Literal, Union + +from pydantic import BaseModel, Field, ValidationError + + +class Cat(BaseModel): + pet_type: Literal['cat'] + meows: int + + +class Dog(BaseModel): + pet_type: Literal['dog'] + barks: float + + +class Lizard(BaseModel): + pet_type: Literal['reptile', 'lizard'] + scales: bool + + +class Model(BaseModel): + pet: Union[Cat, Dog, Lizard] = Field(..., discriminator='pet_type') + n: int + + +print(Model(pet={'pet_type': 'dog', 'barks': 3.14}, n=1)) +#> pet=Dog(pet_type='dog', barks=3.14) n=1 +try: + Model(pet={'pet_type': 'dog'}, n=1) +except ValidationError as e: + print(e) + """ + 1 validation error for Lizard + pet -> dog -> barks + Field required [type=missing, input_value={'pet_type': 'dog'}, input_type=dict] + """ +``` !!! note Using the [Annotated Fields syntax](../schema/#typingannotated-fields) can be handy to regroup @@ -290,13 +514,109 @@ Setting a discriminated union has many benefits: Only one discriminator can be set for a field but sometimes you want to combine multiple discriminators. In this case you can always create "intermediate" models with `__root__` and add your discriminator. -{!.tmp_examples/types_union_discriminated_nested.md!} +```py requires="3.8" +from typing import Literal, Union + +from typing_extensions import Annotated + +from pydantic import BaseModel, Field, ValidationError + + +class BlackCat(BaseModel): + pet_type: Literal['cat'] + color: Literal['black'] + black_name: str + + +class WhiteCat(BaseModel): + pet_type: Literal['cat'] + color: Literal['white'] + white_name: str + + +# Can also be written with a custom root type +# +# class Cat(BaseModel): +# __root__: Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] + +Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] + + +class Dog(BaseModel): + pet_type: Literal['dog'] + name: str + + +Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] + + +class Model(BaseModel): + pet: Pet + n: int + + +m = Model(pet={'pet_type': 'cat', 'color': 'black', 'black_name': 'felix'}, n=1) +print(m) +#> pet=BlackCat(pet_type='cat', color='black', black_name='felix') n=1 +try: + Model(pet={'pet_type': 'cat', 'color': 'red'}, n='1') +except ValidationError as e: + print(e) + """ + 1 validation error for Dog + pet -> cat + Input tag 'red' found using 'color' does not match any of the expected tags: 'black', 'white' [type=union_tag_invalid, input_value={'pet_type': 'cat', 'color': 'red'}, input_type=dict] + """ +try: + Model(pet={'pet_type': 'cat', 'color': 'black'}, n='1') +except ValidationError as e: + print(e) + """ + 1 validation error for Dog + pet -> cat -> black -> black_name + Field required [type=missing, input_value={'pet_type': 'cat', 'color': 'black'}, input_type=dict] + """ +``` ### Enums and Choices *pydantic* uses Python's standard `enum` classes to define choices. -{!.tmp_examples/types_choices.md!} +```py +from enum import Enum, IntEnum + +from pydantic import BaseModel, ValidationError + + +class FruitEnum(str, Enum): + pear = 'pear' + banana = 'banana' + + +class ToolEnum(IntEnum): + spanner = 1 + wrench = 2 + + +class CookingModel(BaseModel): + fruit: FruitEnum = FruitEnum.pear + tool: ToolEnum = ToolEnum.spanner + + +print(CookingModel()) +#> fruit= tool= +print(CookingModel(tool=2, fruit='banana')) +#> fruit= tool= +try: + CookingModel(fruit='other') +except ValidationError as e: + print(e) + """ + 1 validation error for CookingModel + fruit + Input should be 'pear' or 'banana' [type=literal_error, input_value='other', input_type=str] + """ +``` ### Datetime Types @@ -338,7 +658,31 @@ types: * `[-][DD ][HH:MM]SS[.ffffff]` * `[±]P[DD]DT[HH]H[MM]M[SS]S` ([ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format for timedelta) -{!.tmp_examples/types_dt.md!} +```py +from datetime import date, datetime, time, timedelta + +from pydantic import BaseModel + + +class Model(BaseModel): + d: date = None + dt: datetime = None + t: time = None + td: timedelta = None + + +m = Model( + d=1679616000.0, + dt='2032-04-23T10:20:30.400+02:30', + t=time(4, 8, 16), + td='P3DT12H30M5S', +) + +print(m.model_dump()) +""" +{'d': datetime.date(2023, 3, 24), 'dt': datetime.datetime(2032, 4, 23, 10, 20, 30, 400000, tzinfo=TzInfo(+02:30)), 't': datetime.time(4, 8, 16), 'td': datetime.timedelta(days=3, seconds=45005)} +""" +``` ### Booleans @@ -362,13 +706,47 @@ A standard `bool` field will raise a `ValidationError` if the value is not one o Here is a script demonstrating some of these behaviors: -{!.tmp_examples/types_boolean.md!} +```py +from pydantic import BaseModel, ValidationError + + +class BooleanModel(BaseModel): + bool_value: bool + + +print(BooleanModel(bool_value=False)) +#> bool_value=False +print(BooleanModel(bool_value='False')) +#> bool_value=False +try: + BooleanModel(bool_value=[]) +except ValidationError as e: + print(str(e)) + """ + 1 validation error for BooleanModel + bool_value + Input should be a valid boolean [type=bool_type, input_value=[], input_type=list] + """ +``` ### Callable Fields can also be of type `Callable`: -{!.tmp_examples/types_callable.md!} +```py +from typing import Callable + +from pydantic import BaseModel + + +class Foo(BaseModel): + callback: Callable[[int], int] + + +m = Foo(callback=lambda x: x) +print(m) +#> callback= at 0x0123456789ab> +``` !!! warning Callable fields only perform a simple check that the argument is @@ -380,17 +758,97 @@ Fields can also be of type `Callable`: *pydantic* supports the use of `Type[T]` to specify that a field may only accept classes (not instances) that are subclasses of `T`. -{!.tmp_examples/types_type.md!} +```py +from typing import Type + +from pydantic import BaseModel, ValidationError + + +class Foo: + pass + + +class Bar(Foo): + pass + + +class Other: + pass + + +class SimpleModel(BaseModel): + just_subclasses: Type[Foo] + + +SimpleModel(just_subclasses=Foo) +SimpleModel(just_subclasses=Bar) +try: + SimpleModel(just_subclasses=Other) +except ValidationError as e: + print(e) + """ + 1 validation error for SimpleModel + just_subclasses + Input should be a subclass of Foo [type=is_subclass_of, input_value=, input_type=type] + """ +``` You may also use `Type` to specify that any class is allowed. -{!.tmp_examples/types_bare_type.md!} +```py upgrade="skip" +from typing import Type + +from pydantic import BaseModel, ValidationError + + +class Foo: + pass + + +class LenientSimpleModel(BaseModel): + any_class_goes: Type + + +LenientSimpleModel(any_class_goes=int) +LenientSimpleModel(any_class_goes=Foo) +try: + LenientSimpleModel(any_class_goes=Foo()) +except ValidationError as e: + print(e) + """ + 1 validation error for LenientSimpleModel + any_class_goes + Input should be a type [type=is_type, input_value=<__main__.Foo object at 0x0123456789ab>, input_type=Foo] + """ +``` ### TypeVar `TypeVar` is supported either unconstrained, constrained or with a bound. -{!.tmp_examples/types_typevar.md!} +```py +from typing import TypeVar + +from pydantic import BaseModel + +Foobar = TypeVar('Foobar') +BoundFloat = TypeVar('BoundFloat', bound=float) +IntStr = TypeVar('IntStr', int, str) + + +class Model(BaseModel): + a: Foobar # equivalent of ": Any" + b: BoundFloat # equivalent of ": float" + c: IntStr # equivalent of ": Union[int, str]" + + +print(Model(a=[1], b=4.2, c='x')) +#> a=[1] b=4.2 c='x' + +# a may be None +print(Model(a=None, b=1, c=1)) +#> a=None b=1.0 c=1 +``` ## Literal Type @@ -401,22 +859,140 @@ You may also use `Type` to specify that any class is allowed. *pydantic* supports the use of `typing.Literal` (or `typing_extensions.Literal` prior to Python 3.8) as a lightweight way to specify that a field may accept only specific literal values: -{!.tmp_examples/types_literal1.md!} +```py requires="3.8" +from typing import Literal + +from pydantic import BaseModel, ValidationError + + +class Pie(BaseModel): + flavor: Literal['apple', 'pumpkin'] + + +Pie(flavor='apple') +Pie(flavor='pumpkin') +try: + Pie(flavor='cherry') +except ValidationError as e: + print(str(e)) + """ + 1 validation error for Pie + flavor + Input should be 'apple' or 'pumpkin' [type=literal_error, input_value='cherry', input_type=str] + """ +``` One benefit of this field type is that it can be used to check for equality with one or more specific values without needing to declare custom validators: -{!.tmp_examples/types_literal2.md!} +```py requires="3.8" +from typing import ClassVar, List, Literal, Union + +from pydantic import BaseModel, ValidationError + + +class Cake(BaseModel): + kind: Literal['cake'] + required_utensils: ClassVar[List[str]] = ['fork', 'knife'] + + +class IceCream(BaseModel): + kind: Literal['icecream'] + required_utensils: ClassVar[List[str]] = ['spoon'] + + +class Meal(BaseModel): + dessert: Union[Cake, IceCream] + + +print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) +#> Cake +print(type(Meal(dessert={'kind': 'icecream'}).dessert).__name__) +#> IceCream +try: + Meal(dessert={'kind': 'pie'}) +except ValidationError as e: + print(str(e)) + """ + 2 validation errors for IceCream + dessert -> Cake -> kind + Input should be 'cake' [type=literal_error, input_value='pie', input_type=str] + dessert -> IceCream -> kind + Input should be 'icecream' [type=literal_error, input_value='pie', input_type=str] + """ +``` With proper ordering in an annotated `Union`, you can use this to parse types of decreasing specificity: -{!.tmp_examples/types_literal3.md!} +```py requires="3.8" +from typing import Literal, Optional, Union + +from pydantic import BaseModel + + +class Dessert(BaseModel): + kind: str + + +class Pie(Dessert): + kind: Literal['pie'] + flavor: Optional[str] + + +class ApplePie(Pie): + flavor: Literal['apple'] + + +class PumpkinPie(Pie): + flavor: Literal['pumpkin'] + + +class Meal(BaseModel): + dessert: Union[ApplePie, PumpkinPie, Pie, Dessert] + + +print(type(Meal(dessert={'kind': 'pie', 'flavor': 'apple'}).dessert).__name__) +#> ApplePie +print(type(Meal(dessert={'kind': 'pie', 'flavor': 'pumpkin'}).dessert).__name__) +#> PumpkinPie +print(type(Meal(dessert={'kind': 'pie'}).dessert).__name__) +#> Dessert +print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) +#> Dessert +``` ## Annotated Types ### NamedTuple -{!.tmp_examples/annotated_types_named_tuple.md!} +```py +from typing import NamedTuple + +from pydantic import BaseModel, ValidationError + + +class Point(NamedTuple): + x: int + y: int + + +class Model(BaseModel): + p: Point + + +print(Model(p=('1', '2'))) +#> p=Point(x=1, y=2) + +try: + Model(p=('1.3', '2')) +except ValidationError as e: + print(e) + """ + 1 validation error for Model + p -> arguments -> 0 + Input should be a valid integer, got a number with a fractional part [type=int_from_float, input_value='1.3', input_type=str] + """ +``` ### TypedDict @@ -427,7 +1003,64 @@ With proper ordering in an annotated `Union`, you can use this to parse types of We therefore recommend using [typing-extensions](https://pypi.org/project/typing-extensions/) with Python 3.8 as well. -{!.tmp_examples/annotated_types_typed_dict.md!} +```py +from typing_extensions import TypedDict + +from pydantic import BaseModel, Extra, ValidationError + + +# `total=False` means keys are non-required +class UserIdentity(TypedDict, total=False): + name: str + surname: str + + +class User(TypedDict): + identity: UserIdentity + age: int + + +class Model(BaseModel): + model_config = dict(extra=Extra.forbid) + u: User + + +print(Model(u={'identity': {'name': 'Smith', 'surname': 'John'}, 'age': '37'})) +#> u={'identity': {'name': 'Smith', 'surname': 'John'}, 'age': 37} + +print(Model(u={'identity': {'surname': 'John'}, 'age': '37'})) +#> u={'identity': {'surname': 'John'}, 'age': 37} + +print(Model(u={'identity': {}, 'age': '37'})) +#> u={'identity': {}, 'age': 37} + + +try: + Model(u={'identity': {'name': ['Smith'], 'surname': 'John'}, 'age': '24'}) +except ValidationError as e: + print(e) + """ + 1 validation error for Model + u -> identity -> name + Input should be a valid string [type=string_type, input_value=['Smith'], input_type=list] + """ + +try: + Model( + u={ + 'identity': {'name': 'Smith', 'surname': 'John'}, + 'age': '37', + 'email': 'john.smith@me.com', + } + ) +except ValidationError as e: + print(e) + """ + 1 validation error for Model + u -> email + Extra inputs are not permitted [type=extra_forbidden, input_value='john.smith@me.com', input_type=str] + """ +``` ## Pydantic Types @@ -609,12 +1242,100 @@ some nuance to this behavior, demonstrated in the examples below. > being worked on. **Good behavior:** -{!.tmp_examples/types_import_string_usage.md!} +```py +from pydantic import BaseModel, ImportString, ValidationError + + +class ImportThings(BaseModel): + obj: ImportString + + +# A string value will cause an automatic import +my_cos = ImportThings(obj='math.cos') + +# You can use the imported function as you would expect +cos_of_0 = my_cos.obj(0) +assert cos_of_0 == 1 + + +# A string whose value cannot be imported will raise an error +try: + ImportThings(obj='foo.bar') +except ValidationError as e: + print(e) + """ + 1 validation error for ImportThings + obj + Invalid python path: No module named 'foo' [type=import_error, input_value='foo.bar', input_type=str] + """ + + +# TODO sort out the module name here +# # An object defined in the current namespace can indeed be imported, +# # though you should probably avoid doing this (since the ordering of declaration +# # can have an impact on behavior). +# class Foo: +# bar = 1 +# +# +# # This now works +# my_foo = ImportThings(obj=Foo) +# # So does this +# my_foo_2 = ImportThings(obj='__main__.Foo') + + +# Actual python objects can be assigned as well +from math import cos # noqa: E402 + +my_cos = ImportThings(obj=cos) +my_cos_2 = ImportThings(obj='math.cos') +assert my_cos == my_cos_2 +``` **Serializing an `ImportString` type to json is possible with a [custom encoder](exporting_models.md#json_encoders) which accounts for the evaluated object:** -{!.tmp_examples/types_import_string_serialization.md!} +```py test="xfail - replace json_encoders" +from types import BuiltinFunctionType + +from pydantic import BaseModel, ImportString + + +# The following class will not successfully serialize to JSON +# Since "obj" is evaluated to an object, not a pydantic `ImportString` +class WithCustomEncodersBad(BaseModel): + obj: ImportString + + class Config: + json_encoders = {ImportString: lambda x: str(x)} + + +# Create an instance +m = WithCustomEncodersBad(obj='math.cos') + +try: + m.json() +except TypeError as e: + print(e) + +# Let's do some sanity checks to verify that m.obj is not an "ImportString" +print(isinstance(m.obj, ImportString)) +print(isinstance(m.obj, BuiltinFunctionType)) + + +# So now that we know that after an ImportString is evaluated by Pydantic +# it results in its underlying object, we can configure our json encoder +# to account for those specific types +class WithCustomEncodersGood(BaseModel): + obj: ImportString + + class Config: + json_encoders = {BuiltinFunctionType: lambda x: str(x)} + + +m = WithCustomEncodersGood(obj='math.cos') +print(m.json()) +``` ### URLs @@ -671,7 +1392,38 @@ For URI/URL validation the following types are available: The above types (which all inherit from `AnyUrl`) will attempt to give descriptive errors when invalid URLs are provided: -{!.tmp_examples/types_urls.md!} +```py +from pydantic import BaseModel, HttpUrl, ValidationError + + +class MyModel(BaseModel): + url: HttpUrl + + +m = MyModel(url='http://www.example.com') +print(m.url) +#> http://www.example.com/ + +try: + MyModel(url='ftp://invalid.url') +except ValidationError as e: + print(e) + """ + 1 validation error for MyModel + url + URL scheme should be 'http' or 'https' [type=url_scheme, input_value='ftp://invalid.url', input_type=str] + """ + +try: + MyModel(url='not a url') +except ValidationError as e: + print(e) + """ + 1 validation error for MyModel + url + Input should be a valid URL, relative URL without a base [type=url_parsing, input_value='not a url', input_type=str] + """ +``` If you require a custom URI/URL type, it can be created in a similar way to the types defined above. @@ -701,7 +1453,47 @@ the above types export the following properties: If further validation is required, these properties can be used by validators to enforce specific behaviour: -{!.tmp_examples/types_url_properties.md!} +```py +from pydantic import BaseModel, HttpUrl, PostgresDsn, ValidationError, field_validator + + +class MyModel(BaseModel): + url: HttpUrl + + +m = MyModel(url='http://www.example.com') + +# the repr() method for a url will display all properties of the url +print(repr(m.url)) +#> Url('http://www.example.com/') +print(m.url.scheme) +#> http +print(m.url.host) +#> www.example.com +print(m.url.port) +#> 80 + + +class MyDatabaseModel(BaseModel): + db: PostgresDsn + + @field_validator('db') + def check_db_name(cls, v): + assert v.path and len(v.path) > 1, 'database must be provided' + return v + + +m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar') +print(m.db) +#> postgres://user:pass@localhost:5432/foobar + +try: + MyDatabaseModel(db='postgres://user:pass@localhost:5432') +except ValidationError: + pass + # TODO the error output here is wrong! + # print(e) +``` #### International Domains @@ -709,7 +1501,24 @@ If further validation is required, these properties can be used by validators to [punycode](https://en.wikipedia.org/wiki/Punycode) (see [this article](https://www.xudongz.com/blog/2017/idn-phishing/) for a good description of why this is important): -{!.tmp_examples/types_url_punycode.md!} +```py +from pydantic import BaseModel, HttpUrl + + +class MyModel(BaseModel): + url: HttpUrl + + +m1 = MyModel(url='http://puny£code.com') +print(m1.url) +#> http://xn--punycode-eja.com/ +m2 = MyModel(url='https://www.аррӏе.com/') +print(m2.url) +#> https://www.xn--80ak6aa92e.com/ +m3 = MyModel(url='https://www.example.珠宝/') +print(m3.url) +#> https://www.example.xn--pbt977c/ +``` !!! warning @@ -744,7 +1553,40 @@ You can use the `Color` data type for storing colors as per - [HSL strings](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#HSL_colors) (e.g. `"hsl(270, 60%, 70%)"`, `"hsl(270, 60%, 70%, .5)"`) -{!.tmp_examples/types_color.md!} +```py +from pydantic import BaseModel, ValidationError +from pydantic.color import Color + +c = Color('ff00ff') +print(c.as_named()) +#> magenta +print(c.as_hex()) +#> #f0f +c2 = Color('green') +print(c2.as_rgb_tuple()) +#> (0, 128, 0) +print(c2.original()) +#> green +print(repr(Color('hsl(180, 100%, 50%)'))) +#> Color('cyan', rgb=(0, 255, 255)) + + +class Model(BaseModel): + color: Color + + +print(Model(color='purple')) +#> color=Color('purple', rgb=(128, 0, 128)) +try: + Model(color='hello') +except ValidationError as e: + print(e) + """ + 1 validation error for Model + color + value is not a valid color: string not recognised as a valid color [type=color_error, input_value='hello', input_type=str] + """ +``` `Color` has the following methods: @@ -790,7 +1632,56 @@ that you do not want to be visible in logging or tracebacks. `SecretStr` and `SecretBytes` can be initialized idempotently or by using `str` or `bytes` literals respectively. The `SecretStr` and `SecretBytes` will be formatted as either `'**********'` or `''` on conversion to json. -{!.tmp_examples/types_secret_types.md!} +```py test="xfail - replace json_encoders" +from pydantic import BaseModel, SecretBytes, SecretStr, ValidationError + + +class SimpleModel(BaseModel): + password: SecretStr + password_bytes: SecretBytes + + +sm = SimpleModel(password='IAmSensitive', password_bytes=b'IAmSensitiveBytes') + +# Standard access methods will not display the secret +print(sm) +print(sm.password) +print(sm.model_dump()) +print(sm.model_dump_json()) + +# Use get_secret_value method to see the secret's content. +print(sm.password.get_secret_value()) +print(sm.password_bytes.get_secret_value()) + +try: + SimpleModel(password=[1, 2, 3], password_bytes=[1, 2, 3]) +except ValidationError as e: + print(e) + + +# If you want the secret to be dumped as plain-text using the json method, +# you can use json_encoders in the Config class. +class SimpleModelDumpable(BaseModel): + password: SecretStr + password_bytes: SecretBytes + + class Config: + json_encoders = { + SecretStr: lambda v: v.get_secret_value() if v else None, + SecretBytes: lambda v: v.get_secret_value() if v else None, + } + + +sm2 = SimpleModelDumpable(password='IAmSensitive', password_bytes=b'IAmSensitiveBytes') + +# Standard access methods will not display the secret +print(sm2) +print(sm2.password) +print(sm2.model_dump()) + +# But the json method will +print(sm2.model_dump_json()) +``` ### Json Type @@ -798,14 +1689,94 @@ You can use `Json` data type to make *pydantic* first load a raw JSON string. It can also optionally be used to parse the loaded object into another type base on the type `Json` is parameterised with: -{!.tmp_examples/types_json_type.md!} +```py +from typing import Any, List + +from pydantic import BaseModel, Json, ValidationError + + +class AnyJsonModel(BaseModel): + json_obj: Json[Any] + + +class ConstrainedJsonModel(BaseModel): + json_obj: Json[List[int]] + + +print(AnyJsonModel(json_obj='{"b": 1}')) +#> json_obj={'b': 1} +print(ConstrainedJsonModel(json_obj='[1, 2, 3]')) +#> json_obj=[1, 2, 3] +try: + ConstrainedJsonModel(json_obj=12) +except ValidationError as e: + print(e) + """ + 1 validation error for ConstrainedJsonModel + json_obj + JSON input should be string, bytes or bytearray [type=json_type, input_value=12, input_type=int] + """ + +try: + ConstrainedJsonModel(json_obj='[a, b]') +except ValidationError as e: + print(e) + """ + 1 validation error for ConstrainedJsonModel + json_obj + Invalid JSON: expected value at line 1 column 2 [type=json_invalid, input_value='[a, b]', input_type=str] + """ + +try: + ConstrainedJsonModel(json_obj='["a", "b"]') +except ValidationError as e: + print(e) + """ + 2 validation errors for ConstrainedJsonModel + json_obj -> 0 + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] + json_obj -> 1 + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='b', input_type=str] + """ +``` ### Payment Card Numbers The `PaymentCardNumber` type validates [payment cards](https://en.wikipedia.org/wiki/Payment_card) (such as a debit or credit card). -{!.tmp_examples/types_payment_card_number.md!} +```py +from datetime import date + +from pydantic import BaseModel +from pydantic.types import PaymentCardBrand, PaymentCardNumber, constr + + +class Card(BaseModel): + name: constr(strip_whitespace=True, min_length=1) + number: PaymentCardNumber + exp: date + + @property + def brand(self) -> PaymentCardBrand: + return self.number.brand + + @property + def expired(self) -> bool: + return self.exp < date.today() + + +card = Card( + name='Georg Wilhelm Friedrich Hegel', + number='4000000000000002', + exp=date(2023, 9, 30), +) + +assert card.number.brand == PaymentCardBrand.visa +assert card.number.bin == '400000' +assert card.number.last4 == '0002' +assert card.number.masked == '400000******0002' +``` `PaymentCardBrand` can be one of the following based on the BIN: @@ -825,7 +1796,65 @@ The actual validation verifies the card number is: The value of numerous common types can be restricted using `con*` type functions: -{!.tmp_examples/types_constrained.md!} +```py +from decimal import Decimal + +from pydantic import ( + BaseModel, + Field, + NegativeFloat, + NegativeInt, + NonNegativeFloat, + NonNegativeInt, + NonPositiveFloat, + NonPositiveInt, + PositiveFloat, + PositiveInt, + conbytes, + condecimal, + confloat, + conint, + conlist, + conset, + constr, +) + + +class Model(BaseModel): + short_bytes: conbytes(min_length=2, max_length=10) + strict_bytes: conbytes(strict=True) + + upper_str: constr(to_upper=True) + lower_str: constr(to_lower=True) + short_str: constr(min_length=2, max_length=10) + regex_str: constr(pattern=r'^apple (pie|tart|sandwich)$') + strip_str: constr(strip_whitespace=True) + + big_int: conint(gt=1000, lt=1024) + mod_int: conint(multiple_of=5) + pos_int: PositiveInt + neg_int: NegativeInt + non_neg_int: NonNegativeInt + non_pos_int: NonPositiveInt + + big_float: confloat(gt=1000, lt=1024) + unit_interval: confloat(ge=0, le=1) + mod_float: confloat(multiple_of=0.5) + pos_float: PositiveFloat + neg_float: NegativeFloat + non_neg_float: NonNegativeFloat + non_pos_float: NonPositiveFloat + + short_list: conlist(int, min_length=1, max_length=4) + short_set: conset(int, min_length=1, max_length=4) + + decimal_positive: condecimal(gt=0) + decimal_negative: condecimal(lt=0) + decimal_max_digits_and_places: condecimal(max_digits=2, decimal_places=2) + mod_decimal: condecimal(multiple_of=Decimal('0.25')) + + bigger_int: int = Field(..., gt=10000) +``` Where `Field` refers to the [field function](schema.md#field-customization). @@ -932,7 +1961,81 @@ The following caveats apply: even though `bool` is a subclass of `int` in Python. Other subclasses will work. - `StrictFloat` (and the `strict` option of `ConstrainedFloat`) will not accept `int`. -{!.tmp_examples/types_strict.md!} +```py +from pydantic import ( + BaseModel, + StrictBool, + StrictBytes, + StrictInt, + ValidationError, + confloat, +) + + +class StrictBytesModel(BaseModel): + strict_bytes: StrictBytes + + +try: + StrictBytesModel(strict_bytes='hello world') +except ValidationError as e: + print(e) + """ + 1 validation error for StrictBytesModel + strict_bytes + Input should be a valid bytes [type=bytes_type, input_value='hello world', input_type=str] + """ + + +class StrictIntModel(BaseModel): + strict_int: StrictInt + + +try: + StrictIntModel(strict_int=3.14159) +except ValidationError as e: + print(e) + """ + 1 validation error for StrictIntModel + strict_int + Input should be a valid integer [type=int_type, input_value=3.14159, input_type=float] + """ + + +class ConstrainedFloatModel(BaseModel): + constrained_float: confloat(strict=True, ge=0.0) + + +try: + ConstrainedFloatModel(constrained_float=3) +except ValidationError as e: + print(e) + +try: + ConstrainedFloatModel(constrained_float=-1.23) +except ValidationError as e: + print(e) + """ + 1 validation error for ConstrainedFloatModel + constrained_float + Input should be greater than or equal to 0 [type=greater_than_equal, input_value=-1.23, input_type=float] + """ + + +class StrictBoolModel(BaseModel): + strict_bool: StrictBool + + +try: + StrictBoolModel(strict_bool='False') +except ValidationError as e: + print(str(e)) + """ + 1 validation error for StrictBoolModel + strict_bool + Input should be a valid boolean [type=bool_type, input_value='False', input_type=str] + """ +``` ## ByteSize @@ -942,7 +2045,28 @@ raw bytes and print out human readable versions of the bytes as well. !!! info Note that `1b` will be parsed as "1 byte" and not "1 bit". -{!.tmp_examples/types_bytesize.md!} +```py +from pydantic import BaseModel, ByteSize + + +class MyModel(BaseModel): + size: ByteSize + + +print(MyModel(size=52000).size) +#> 52000 +print(MyModel(size='3000 KiB').size) +#> 3072000 + +m = MyModel(size='50 PB') +print(m.size.human_readable()) +#> 44.4PiB +print(m.size.human_readable(decimal=True)) +#> 50.0PB + +print(m.size.to('TiB')) +#> 45474.73508864641 +``` ## Custom Data Types @@ -957,7 +2081,77 @@ to get validators to parse and validate the input data. These validators have the same semantics as in [Validators](validators.md), you can declare a parameter `config`, `field`, etc. -{!.tmp_examples/types_custom_type.md!} +```py test="xfail - replace with Annoated[str, PostCodeLogic]" +import re + +from pydantic import BaseModel + +# https://en.wikipedia.org/wiki/Postcodes_in_the_United_Kingdom#Validation +post_code_regex = re.compile( + r'(?:' + r'([A-Z]{1,2}[0-9][A-Z0-9]?|ASCN|STHL|TDCU|BBND|[BFS]IQQ|PCRN|TKCA) ?' + r'([0-9][A-Z]{2})|' + r'(BFPO) ?([0-9]{1,4})|' + r'(KY[0-9]|MSR|VG|AI)[ -]?[0-9]{4}|' + r'([A-Z]{2}) ?([0-9]{2})|' + r'(GE) ?(CX)|' + r'(GIR) ?(0A{2})|' + r'(SAN) ?(TA1)' + r')' +) + + +class PostCode(str): + """ + Partial UK postcode validation. Note: this is just an example, and is not + intended for use in production; in particular this does NOT guarantee + a postcode exists, just that it has a valid format. + """ + + @classmethod + def __get_validators__(cls): + # one or more validators may be yielded which will be called in the + # order to validate the input, each validator will receive as an input + # the value returned from the previous validator + yield cls.validate + + @classmethod + def __pydantic_modify_json_schema__(cls, field_schema): + # __pydantic_modify_json_schema__ should mutate the dict it receives + # in place, the returned value will be ignored + field_schema.update( + # simplified regex here for brevity, see the wikipedia link above + pattern='^[A-Z]{1,2}[0-9][A-Z0-9]? ?[0-9][A-Z]{2}$', + # some example postcodes + examples=['SP11 9DG', 'w1j7bu'], + ) + + @classmethod + def validate(cls, v): + if not isinstance(v, str): + raise TypeError('string required') + m = post_code_regex.fullmatch(v.upper()) + if not m: + raise ValueError('invalid postcode format') + # you could also return a string here which would mean model.post_code + # would be a string, pydantic won't care but you could end up with some + # confusion since the value's type won't match the type annotation + # exactly + return cls(f'{m.group(1)} {m.group(2)}') + + def __repr__(self): + return f'PostCode({super().__repr__()})' + + +class Model(BaseModel): + post_code: PostCode + + +model = Model(post_code='sw8 5el') +print(model) +print(model.post_code) +print(Model.model_json_schema()) +``` Similar validation could be achieved using [`constr(regex=...)`](#constrained-types) except the value won't be formatted with a space, the schema would just include the full pattern and the returned value would be a vanilla string. @@ -969,14 +2163,108 @@ See [schema](schema.md) for more details on how the model's schema is generated. You can allow arbitrary types using the `arbitrary_types_allowed` config in the [Model Config](model_config.md). -{!.tmp_examples/types_arbitrary_allowed.md!} +```py +from pydantic import BaseModel, ValidationError + + +# This is not a pydantic model, it's an arbitrary class +class Pet: + def __init__(self, name: str): + self.name = name + + +class Model(BaseModel): + model_config = dict(arbitrary_types_allowed=True) + pet: Pet + owner: str + + +pet = Pet(name='Hedwig') +# A simple check of instance type is used to validate the data +model = Model(owner='Harry', pet=pet) +print(model) +#> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' +print(model.pet) +#> <__main__.Pet object at 0x0123456789ab> +print(model.pet.name) +#> Hedwig +print(type(model.pet)) +#> +try: + # If the value is not an instance of the type, it's invalid + Model(owner='Harry', pet='Hedwig') +except ValidationError as e: + print(e) + """ + 1 validation error for Model + pet + Input should be an instance of Pet [type=is_instance_of, input_value='Hedwig', input_type=str] + """ +# Nothing in the instance of the arbitrary type is checked +# Here name probably should have been a str, but it's not validated +pet2 = Pet(name=42) +model2 = Model(owner='Harry', pet=pet2) +print(model2) +#> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' +print(model2.pet) +#> <__main__.Pet object at 0x0123456789ab> +print(model2.pet.name) +#> 42 +print(type(model2.pet)) +#> +``` ### Undefined Types Warning You can suppress the Undefined Types Warning by setting `undefined_types_warning` to `False` in the [Model Config](model_config.md). -{!.tmp_examples/types_undefined_warning.md!} +```py test="xfail - what do we do with undefined_types_warning?" +from __future__ import annotations + +from pydantic import BaseModel + +# This example shows how Book and Person types reference each other. +# We will demonstrate how to suppress the undefined types warning +# when define such models. + + +class Book(BaseModel): + title: str + author: Person # note the `Person` type is not yet defined + + # Suppress undefined types warning so we can continue defining our models. + class Config: + undefined_types_warning = False + + +class Person(BaseModel): + name: str + books_read: list[Book] | None = None + + +# Now, we can rebuild the `Book` model, since the `Person` model is now defined. +# Note: there's no need to call `model_rebuild()` on `Person`, +# it's already complete. +Book.model_rebuild() + +# Let's create some instances of our models, to demonstrate that they work. +python_crash_course = Book( + title='Python Crash Course', + author=Person(name='Eric Matthes'), +) +jane_doe = Person(name='Jane Doe', books_read=[python_crash_course]) + +assert jane_doe.dict(exclude_unset=True) == { + 'name': 'Jane Doe', + 'books_read': [ + { + 'title': 'Python Crash Course', + 'author': {'name': 'Eric Matthes'}, + }, + ], +} +``` ### Generic Classes as Types @@ -995,4 +2283,89 @@ If the Generic class that you are using as a sub-type has a classmethod Because you can declare validators that receive the current `field`, you can extract the `sub_fields` (from the generic class type parameters) and validate data with them. -{!.tmp_examples/types_generics.md!} +```py test="xfail - what do we do with generic custom types" +from typing import Generic, TypeVar + +from pydantic import BaseModel, ValidationError +from pydantic.fields import ModelField + +AgedType = TypeVar('AgedType') +QualityType = TypeVar('QualityType') + + +# This is not a pydantic model, it's an arbitrary generic class +class TastingModel(Generic[AgedType, QualityType]): + def __init__(self, name: str, aged: AgedType, quality: QualityType): + self.name = name + self.aged = aged + self.quality = quality + + @classmethod + def __get_validators__(cls): + yield cls.validate + + @classmethod + # You don't need to add the "ModelField", but it will help your + # editor give you completion and catch errors + def validate(cls, v, field: ModelField): + if not isinstance(v, cls): + # The value is not even a TastingModel + raise TypeError('Invalid value') + if not field.sub_fields: + # Generic parameters were not provided so we don't try to validate + # them and just return the value as is + return v + aged_f = field.sub_fields[0] + quality_f = field.sub_fields[1] + errors = [] + # Here we don't need the validated value, but we want the errors + valid_value, error = aged_f.validate(v.aged, {}, loc='aged') + if error: + errors.append(error) + # Here we don't need the validated value, but we want the errors + valid_value, error = quality_f.validate(v.quality, {}, loc='quality') + if error: + errors.append(error) + if errors: + raise ValidationError(errors, cls) + # Validation passed without errors, return the same instance received + return v + + +class Model(BaseModel): + # for wine, "aged" is an int with years, "quality" is a float + wine: TastingModel[int, float] + # for cheese, "aged" is a bool, "quality" is a str + cheese: TastingModel[bool, str] + # for thing, "aged" is a Any, "quality" is Any + thing: TastingModel + + +model = Model( + # This wine was aged for 20 years and has a quality of 85.6 + wine=TastingModel(name='Cabernet Sauvignon', aged=20, quality=85.6), + # This cheese is aged (is mature) and has "Good" quality + cheese=TastingModel(name='Gouda', aged=True, quality='Good'), + # This Python thing has aged "Not much" and has a quality "Awesome" + thing=TastingModel(name='Python', aged='Not much', quality='Awesome'), +) +print(model) +print(model.wine.aged) +print(model.wine.quality) +print(model.cheese.aged) +print(model.cheese.quality) +print(model.thing.aged) +try: + # If the values of the sub-types are invalid, we get an error + Model( + # For wine, aged should be an int with the years, and quality a float + wine=TastingModel(name='Merlot', aged=True, quality='Kinda good'), + # For cheese, aged should be a bool, and quality a str + cheese=TastingModel(name='Gouda', aged='yeah', quality=5), + # For thing, no type parameters are declared, and we skipped validation + # in those cases in the Assessment.validate() function + thing=TastingModel(name='Python', aged='Not much', quality='Awesome'), + ) +except ValidationError as e: + print(e) +``` diff --git a/docs/usage/validation_decorator.md b/docs/usage/validation_decorator.md index d0dc1474b7..abbf6f89ee 100644 --- a/docs/usage/validation_decorator.md +++ b/docs/usage/validation_decorator.md @@ -11,16 +11,66 @@ boilerplate. Example of usage: -{!.tmp_examples/validation_decorator_main.md!} +```py +from pydantic import ValidationError, validate_arguments + + +@validate_arguments +def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes: + b = s.encode() + return separator.join(b for _ in range(count)) + + +a = repeat('hello', 3) +print(a) +#> b'hellohellohello' + +b = repeat('x', '4', separator=' ') +print(b) +#> b'x x x x' + +try: + c = repeat('hello', 'wrong') +except ValidationError as exc: + print(exc) + """ + 1 validation error for Repeat + count + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='wrong', input_type=str] + """ +``` ## Argument Types Argument types are inferred from type annotations on the function, arguments without a type decorator are considered -as `Any`. Since `validate_arguments` internally uses a standard `BaseModel`, all types listed in -[types](types.md) can be validated, including *pydantic* models and [custom types](types.md#custom-data-types). +as `Any`. All types listed in [types](types.md) can be validated, including *pydantic* models and +[custom types](types.md#custom-data-types). As with the rest of *pydantic*, types can be coerced by the decorator before they're passed to the actual function: -{!.tmp_examples/validation_decorator_types.md!} +```py test="no-print-intercept" +# TODO replace find_file with something that isn't affected the filesystem +import os +from pathlib import Path +from typing import Optional, Pattern + +from pydantic import DirectoryPath, validate_arguments + + +@validate_arguments +def find_file(path: DirectoryPath, regex: Pattern, max=None) -> Optional[Path]: + for i, f in enumerate(path.glob('**/*')): + if max and i > max: + return + if f.is_file() and regex.fullmatch(str(f.relative_to(path))): + return f + + +# note: this_dir is a string here +this_dir = os.path.dirname(__file__) + +print(find_file(this_dir, '^validation.*')) +print(find_file(this_dir, '^foobar.*', max=3)) +``` A few notes: @@ -44,7 +94,90 @@ combinations of these: To demonstrate all the above parameter types: -{!.tmp_examples/validation_decorator_parameter_types.md!} +```py requires="3.8" +from pydantic import validate_arguments + + +@validate_arguments +def pos_or_kw(a: int, b: int = 2) -> str: + return f'a={a} b={b}' + + +print(pos_or_kw(1)) +#> a=1 b=2 +print(pos_or_kw(a=1)) +#> a=1 b=2 +print(pos_or_kw(1, 3)) +#> a=1 b=3 +print(pos_or_kw(a=1, b=3)) +#> a=1 b=3 + + +@validate_arguments +def kw_only(*, a: int, b: int = 2) -> str: + return f'a={a} b={b}' + + +print(kw_only(a=1)) +#> a=1 b=2 +print(kw_only(a=1, b=3)) +#> a=1 b=3 + + +@validate_arguments +def pos_only(a: int, b: int = 2, /) -> str: # python 3.8 only + return f'a={a} b={b}' + + +print(pos_only(1)) +#> a=1 b=2 +print(pos_only(1, 2)) +#> a=1 b=2 + + +@validate_arguments +def var_args(*args: int) -> str: + return str(args) + + +print(var_args(1)) +#> (1,) +print(var_args(1, 2)) +#> (1, 2) +print(var_args(1, 2, 3)) +#> (1, 2, 3) + + +@validate_arguments +def var_kwargs(**kwargs: int) -> str: + return str(kwargs) + + +print(var_kwargs(a=1)) +#> {'a': 1} +print(var_kwargs(a=1, b=2)) +#> {'a': 1, 'b': 2} + + +@validate_arguments +def armageddon( + a: int, + /, # python 3.8 only + b: int, + c: int = None, + *d: int, + e: int, + f: int = None, + **g: int, +) -> str: + return f'a={a} b={b} c={c} d={d} e={e} f={f} g={g}' + + +print(armageddon(1, 2, e=3)) +#> a=1 b=2 c=None d=() e=3 f=None g={} +print(armageddon(1, 2, 3, 4, 5, 6, e=8, f=9, g=10, spam=11)) +#> a=1 b=2 c=3 d=(4, 5, 6) e=8 f=9 g={'g': 10, 'spam': 11} +``` ## Using Field to describe function arguments @@ -53,11 +186,54 @@ the field and validations. In general it should be used in a type hint with [Annotated](schema.md#typingannotated-fields), unless `default_factory` is specified, in which case it should be used as the default value of the field: -{!.tmp_examples/validation_decorator_field.md!} +```py +from datetime import datetime + +from typing_extensions import Annotated + +from pydantic import Field, ValidationError, validate_arguments + + +@validate_arguments +def how_many(num: Annotated[int, Field(gt=10)]): + return num + + +try: + how_many(1) +except ValidationError as e: + print(e) + """ + 1 validation error for HowMany + num + Input should be greater than 10 [type=greater_than, input_value=1, input_type=int] + """ + + +@validate_arguments +def when(dt: datetime = Field(default_factory=datetime.now)): + return dt + + +print(type(when())) +#> +``` The [alias](model_config.md#alias-precedence) can be used with the decorator as normal. -{!.tmp_examples/validation_decorator_field_alias.md!} +```py +from typing_extensions import Annotated + +from pydantic import Field, validate_arguments + + +@validate_arguments +def how_many(num: Annotated[int, Field(gt=10, alias='number')]): + return num + + +how_many(number=42) +``` ## Usage with mypy @@ -73,20 +249,107 @@ By default, arguments validation is done by directly calling the decorated funct But what if you wanted to validate them without *actually* calling the function? To do that you can call the `validate` method bound to the decorated function. -{!.tmp_examples/validation_decorator_validate.md!} +```py +from pydantic import ValidationError, validate_arguments + + +@validate_arguments +def slow_sum(a: int, b: int) -> int: + print(f'Called with a={a}, b={b}') + #> Called with a=1, b=1 + return a + b + + +slow_sum(1, 1) + +slow_sum.validate(2, 2) + +try: + slow_sum.validate(1, 'b') +except ValidationError as exc: + print(exc) + """ + 1 validation error for SlowSum + b + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='b', input_type=str] + """ +``` ## Raw function The raw function which was decorated is accessible, this is useful if in some scenarios you trust your input arguments and want to call the function in the most performant way (see [notes on performance](#performance) below): -{!.tmp_examples/validation_decorator_raw_function.md!} +```py +from pydantic import validate_arguments + + +@validate_arguments +def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes: + b = s.encode() + return separator.join(b for _ in range(count)) + + +a = repeat('hello', 3) +print(a) +#> b'hellohellohello' + +b = repeat.raw_function('good bye', 2, separator=b', ') +print(b) +#> b'good bye, good bye' +``` ## Async Functions `validate_arguments` can also be used on async functions: -{!.tmp_examples/validation_decorator_async.md!} +```py +class Connection: + async def execute(self, sql, *args): + return 'testing@example.com' + + +conn = Connection() +# ignore-above +import asyncio + +from pydantic import PositiveInt, ValidationError, validate_arguments + + +@validate_arguments +async def get_user_email(user_id: PositiveInt): + # `conn` is some fictional connection to a database + email = await conn.execute('select email from users where id=$1', user_id) + if email is None: + raise RuntimeError('user not found') + else: + return email + + +async def main(): + email = await get_user_email(123) + print(email) + #> testing@example.com + try: + await get_user_email(-4) + except ValidationError as exc: + print(exc.errors()) + """ + [ + { + 'type': 'greater_than', + 'loc': ('user_id',), + 'msg': 'Input should be greater than 0', + 'input': -4, + 'ctx': {'gt': 0}, + } + ] + """ + + +asyncio.run(main()) +# requires: `conn.execute()` that will return `'testing@example.com'` +``` ## Custom Config @@ -100,7 +363,42 @@ setting the `Config` sub-class in normal models. Configuration is set using the `config` keyword argument to the decorator, it may be either a config class or a dict of properties which are converted to a class later. -{!.tmp_examples/validation_decorator_config.md!} +```py +from pydantic import ValidationError, validate_arguments + + +class Foobar: + def __init__(self, v: str): + self.v = v + + def __add__(self, other: 'Foobar') -> str: + return f'{self} + {other}' + + def __str__(self) -> str: + return f'Foobar({self.v})' + + +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def add_foobars(a: Foobar, b: Foobar): + return a + b + + +c = add_foobars(Foobar('a'), Foobar('b')) +print(c) +#> Foobar(a) + Foobar(b) + +try: + add_foobars(1, 2) +except ValidationError as e: + print(e) + """ + 2 validation errors for AddFoobars + a + Input should be an instance of Foobar [type=is_instance_of, input_value=1, input_type=int] + b + Input should be an instance of Foobar [type=is_instance_of, input_value=2, input_type=int] + """ +``` ## Limitations diff --git a/docs/usage/validators.md b/docs/usage/validators.md index 2fc75f5c7e..c6142ce972 100644 --- a/docs/usage/validators.md +++ b/docs/usage/validators.md @@ -1,6 +1,61 @@ Custom validation and complex relationships between objects can be achieved using the `validator` decorator. -{!.tmp_examples/validators_simple.md!} +```py +from pydantic_core.core_schema import FieldValidationInfo + +from pydantic import BaseModel, ValidationError, field_validator + + +class UserModel(BaseModel): + name: str + username: str + password1: str + password2: str + + @field_validator('name') + def name_must_contain_space(cls, v): + if ' ' not in v: + raise ValueError('must contain a space') + return v.title() + + @field_validator('password2') + def passwords_match(cls, v, info: FieldValidationInfo): + if 'password1' in info.data and v != info.data['password1']: + raise ValueError('passwords do not match') + return v + + @field_validator('username') + def username_alphanumeric(cls, v): + assert v.isalnum(), 'must be alphanumeric' + return v + + +user = UserModel( + name='samuel colvin', + username='scolvin', + password1='zxcvbn', + password2='zxcvbn', +) +print(user) +#> name='Samuel Colvin' username='scolvin' password1='zxcvbn' password2='zxcvbn' + +try: + UserModel( + name='samuel', + username='scolvin', + password1='zxcvbn', + password2='zxcvbn2', + ) +except ValidationError as e: + print(e) + """ + 2 validation errors for UserModel + name + Value error, must contain a space [type=value_error, input_value='samuel', input_type=str] + password2 + Value error, passwords do not match [type=value_error, input_value='zxcvbn2', input_type=str] + """ +``` A few things to note on validators: @@ -34,7 +89,54 @@ A few things to note on validators: Validators can do a few more complex things: -{!.tmp_examples/validators_pre_item.md!} +```py test="xfail - we need annotated validators for this examples" +from typing import List + +from pydantic import BaseModel, ValidationError, field_validator + + +class DemoModel(BaseModel): + square_numbers: List[int] = [] + cube_numbers: List[int] = [] + + @field_validator('square_numbers', 'cube_numbers', mode='before') + def split_str(cls, v): + if isinstance(v, str): + return v.split('|') + return v + + @field_validator('cube_numbers', 'square_numbers') + def check_sum(cls, v): + if sum(v) > 42: + raise ValueError('sum of numbers greater than 42') + return v + + @field_validator('square_numbers') # TODO replace with Annotated + def check_squares(cls, v): + assert v**0.5 % 1 == 0, f'{v} is not a square number' + return v + + @field_validator('cube_numbers') # TODO replace with Annotated + def check_cubes(cls, v): + # 64 ** (1 / 3) == 3.9999999999999996 (!) + # this is not a good way of checking cubes + assert v ** (1 / 3) % 1 == 0, f'{v} is not a cubed number' + return v + + +print(DemoModel(square_numbers=[1, 4, 9])) +print(DemoModel(square_numbers='1|4|16')) +print(DemoModel(square_numbers=[16], cube_numbers=[8, 27])) +try: + DemoModel(square_numbers=[1, 4, 2]) +except ValidationError as e: + print(e) + +try: + DemoModel(cube_numbers=[27, 27]) +except ValidationError as e: + print(e) +``` A few more things to note: @@ -49,7 +151,45 @@ A few more things to note: If using a validator with a subclass that references a `List` type field on a parent class, using `each_item=True` will cause the validator not to run; instead, the list must be iterated over programmatically. -{!.tmp_examples/validators_subclass_each_item.md!} +```py test="xfail - we need annotated validators for this examples" +from typing import List + +from pydantic import BaseModel, ValidationError, validator + + +class ParentModel(BaseModel): + names: List[str] + + +class ChildModel(ParentModel): + @validator('names', each_item=True) + def check_names_not_empty(cls, v): + assert v != '', 'Empty strings are not allowed.' + return v + + +# This will NOT raise a ValidationError because the validator was not called +try: + child = ChildModel(names=['Alice', 'Bob', 'Eve', '']) +except ValidationError as e: + print(e) +else: + print('No ValidationError caught.') + + +class ChildModel2(ParentModel): + @validator('names') + def check_names_not_empty(cls, v): + for name in v: + assert name != '', 'Empty strings are not allowed.' + return v + + +try: + child = ChildModel2(names=['Alice', 'Bob', 'Eve', '']) +except ValidationError as e: + print(e) +``` ## Validate Always @@ -57,7 +197,23 @@ For performance reasons, by default validators are not called for fields when a However there are situations where it may be useful or required to always call the validator, e.g. to set a dynamic default value. -{!.tmp_examples/validators_always.md!} +```py test="xfail - we need default value validation" +from datetime import datetime + +from pydantic import BaseModel, validator + + +class DemoModel(BaseModel): + ts: datetime = None + + @validator('ts', pre=True, always=True) + def set_ts_now(cls, v): + return v or datetime.now() + + +print(DemoModel()) +print(DemoModel(ts='2017-11-08T14:00')) +``` You'll often want to use this together with `pre`, since otherwise with `always=True` *pydantic* would try to validate the default `None` which would cause an error. @@ -70,7 +226,33 @@ then call it from multiple decorators. Obviously, this entails a lot of repetit boiler plate code. To circumvent this, the `allow_reuse` parameter has been added to `pydantic.validator` in **v1.2** (`False` by default): -{!.tmp_examples/validators_allow_reuse.md!} +```py +from pydantic import BaseModel, field_validator + + +def normalize(name: str) -> str: + return ' '.join((word.capitalize()) for word in name.split(' ')) + + +class Producer(BaseModel): + name: str + + # validators + normalize_name = field_validator('name', allow_reuse=True)(normalize) + + +class Consumer(BaseModel): + name: str + + # validators + normalize_name = field_validator('name', allow_reuse=True)(normalize) + + +jane_doe = Producer(name='JaNe DOE') +john_doe = Consumer(name='joHN dOe') +assert jane_doe.name == 'Jane Doe' +assert john_doe.name == 'John Doe' +``` As it is obvious, repetition has been reduced and the models become again almost declarative. @@ -84,7 +266,54 @@ declarative. Validation can also be performed on the entire model's data. -{!.tmp_examples/validators_root.md!} +```py +from pydantic import BaseModel, ValidationError, root_validator + + +class UserModel(BaseModel): + username: str + password1: str + password2: str + + @root_validator(pre=True) + def check_card_number_omitted(cls, values): + assert 'card_number' not in values, 'card_number should not be included' + return values + + @root_validator(skip_on_failure=True) + def check_passwords_match(cls, values): + pw1, pw2 = values.get('password1'), values.get('password2') + if pw1 is not None and pw2 is not None and pw1 != pw2: + raise ValueError('passwords do not match') + return values + + +print(UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn')) +#> username='scolvin' password1='zxcvbn' password2='zxcvbn' +try: + UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn2') +except ValidationError as e: + print(e) + """ + 1 validation error for UserModel + Value error, passwords do not match [type=value_error, input_value={'username': 'scolvin', '... 'password2': 'zxcvbn2'}, input_type=dict] + """ + +try: + UserModel( + username='scolvin', + password1='zxcvbn', + password2='zxcvbn', + card_number='1234', + ) +except ValidationError as e: + print(e) + """ + 1 validation error for UserModel + Assertion failed, card_number should not be included + assert 'card_number' not in {'card_number': '1234', 'password1': 'zxcvbn', 'password2': 'zxcvbn', 'username': 'scolvin'} [type=assertion_error, input_value={'username': 'scolvin', '..., 'card_number': '1234'}, input_type=dict] + """ +``` As with field validators, root validators can have `pre=True`, in which case they're called before field validation occurs (and are provided with the raw input data), or `pre=False` (the default), in which case @@ -107,4 +336,22 @@ In this case you should set `check_fields=False` on the validator. Validators also work with *pydantic* dataclasses. -{!.tmp_examples/validators_dataclass.md!} +```py test="xfail - currently decorators are messing up dataclass field defaults #5271" +from datetime import datetime + +from pydantic import field_validator +from pydantic.dataclasses import dataclass + + +@dataclass +class DemoDataclass: + ts: datetime = None + + @field_validator('ts', mode='before') + def set_ts_now(cls, v): + return v or datetime.now() + + +print(DemoDataclass()) +print(DemoDataclass(ts='2017-11-08T14:00')) +``` diff --git a/docs/visual_studio_code.md b/docs/visual_studio_code.md index d8af73835a..3d85f09929 100644 --- a/docs/visual_studio_code.md +++ b/docs/visual_studio_code.md @@ -111,8 +111,7 @@ class Quest(BaseModel): quest = Quest( - title='To seek the Holy Grail', - knight={'title': 'Sir Lancelot', 'age': 23} + title='To seek the Holy Grail', knight={'title': 'Sir Lancelot', 'age': 23} ) ``` @@ -169,6 +168,7 @@ You can also create a variable with the value you want to use and declare it's t ```Python hl_lines="1 11-12" from typing import Any + from pydantic import BaseModel @@ -196,6 +196,7 @@ This way, the type declaration of the value is overriden inline, without requiri ```Python hl_lines="1 11" from typing import Any, cast + from pydantic import BaseModel @@ -227,12 +228,10 @@ from pydantic import BaseModel class Knight(BaseModel): + model_config = dict(frozen=True) title: str age: int color: str = 'blue' - - class Config: - frozen = True ``` or passed as keyword arguments when defining the model class: @@ -267,7 +266,10 @@ from pydantic import BaseModel, Field class Knight(BaseModel): title: str = Field(default='Sir Lancelot') # this is okay - age: int = Field(23) # this works fine at runtime but will case an error for pyright + age: int = Field( + 23 + ) # this works fine at runtime but will case an error for pyright + lance = Knight() # error: Argument missing for parameter "age" ``` diff --git a/mkdocs.yml b/mkdocs.yml index a5f043363a..6e997e4da0 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -72,8 +72,6 @@ nav: markdown_extensions: - tables -- markdown_include.include: - base_path: docs - toc: permalink: true title: Page contents @@ -91,7 +89,10 @@ plugins: - search - exclude: glob: - - _build/* - - build/* - - examples/* - - requirements.txt + - plugins/* + - __pycache__/* +- mkdocs-simple-hooks: + hooks: + on_pre_build: 'docs.plugins.main:on_pre_build' + on_files: 'docs.plugins.main:on_files' + on_page_markdown: 'docs.plugins.main:on_page_markdown' diff --git a/pydantic/_internal/_generate_schema.py b/pydantic/_internal/_generate_schema.py index 5fa55ba3b7..9a1b15949c 100644 --- a/pydantic/_internal/_generate_schema.py +++ b/pydantic/_internal/_generate_schema.py @@ -180,6 +180,7 @@ def generate_config(config: ConfigDict, cls: type[Any]) -> core_schema.CoreConfi strict=config['strict'], ser_json_timedelta=config['ser_json_timedelta'], ser_json_bytes=config['ser_json_bytes'], + from_attributes=config['from_attributes'], ) str_max_length = config.get('str_max_length') if str_max_length is not None: diff --git a/requirements/docs.in b/requirements/docs.in index 348bfa49e8..dc897ab18c 100644 --- a/requirements/docs.in +++ b/requirements/docs.in @@ -1,10 +1,8 @@ autoflake -ansi2html -markdown-include mdx-truly-sane-lists mkdocs mkdocs-exclude mkdocs-material -orjson -sqlalchemy -ujson +mkdocs-simple-hooks +pyupgrade +tomli diff --git a/requirements/docs.txt b/requirements/docs.txt index e512244d82..ce6ec419f9 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,11 +1,9 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --output-file=requirements/docs.txt --resolver=backtracking requirements/docs.in # -ansi2html==1.8.0 - # via -r requirements/docs.in autoflake==2.0.2 # via -r requirements/docs.in certifi==2022.12.7 @@ -26,13 +24,10 @@ jinja2==3.1.2 # mkdocs-material markdown==3.3.7 # via - # markdown-include # mdx-truly-sane-lists # mkdocs # mkdocs-material # pymdown-extensions -markdown-include==0.8.1 - # via -r requirements/docs.in markupsafe==2.1.2 # via jinja2 mdx-truly-sane-lists==1.3 @@ -44,13 +39,14 @@ mkdocs==1.4.2 # -r requirements/docs.in # mkdocs-exclude # mkdocs-material + # mkdocs-simple-hooks mkdocs-exclude==1.0.2 # via -r requirements/docs.in -mkdocs-material==9.1.3 +mkdocs-material==9.1.4 # via -r requirements/docs.in mkdocs-material-extensions==1.1.1 # via mkdocs-material -orjson==3.8.8 +mkdocs-simple-hooks==0.1.5 # via -r requirements/docs.in packaging==23.0 # via mkdocs @@ -62,6 +58,8 @@ pymdown-extensions==9.10 # via mkdocs-material python-dateutil==2.8.2 # via ghp-import +pyupgrade==3.3.1 + # via -r requirements/docs.in pyyaml==6.0 # via # mkdocs @@ -69,20 +67,18 @@ pyyaml==6.0 # pyyaml-env-tag pyyaml-env-tag==0.1 # via mkdocs -regex==2023.3.22 +regex==2022.10.31 # via mkdocs-material requests==2.28.2 # via mkdocs-material six==1.16.0 # via python-dateutil -sqlalchemy==2.0.7 - # via -r requirements/docs.in +tokenize-rt==5.0.0 + # via pyupgrade tomli==2.0.1 - # via autoflake -typing-extensions==4.5.0 - # via sqlalchemy -ujson==5.7.0 - # via -r requirements/docs.in + # via + # -r requirements/docs.in + # autoflake urllib3==1.26.15 # via requests watchdog==3.0.0 diff --git a/requirements/linting.txt b/requirements/linting.txt index d6e75e8bfc..c1b71b755b 100644 --- a/requirements/linting.txt +++ b/requirements/linting.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --output-file=requirements/linting.txt --resolver=backtracking requirements/linting.in # @@ -48,7 +48,7 @@ pyupgrade==3.3.1 # via -r requirements/linting.in pyyaml==6.0 # via pre-commit -ruff==0.0.258 +ruff==0.0.259 # via -r requirements/linting.in sortedcontainers==2.4.0 # via hypothesis diff --git a/requirements/pyproject-all.txt b/requirements/pyproject-all.txt index 2d6501d90e..9f8a5137a3 100644 --- a/requirements/pyproject-all.txt +++ b/requirements/pyproject-all.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --extra=email --output-file=requirements/pyproject-all.txt --resolver=backtracking pyproject.toml # diff --git a/requirements/pyproject-min.txt b/requirements/pyproject-min.txt index f775a2effc..efa35332b8 100644 --- a/requirements/pyproject-min.txt +++ b/requirements/pyproject-min.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --output-file=requirements/pyproject-min.txt --resolver=backtracking pyproject.toml # diff --git a/requirements/testing-extra.in b/requirements/testing-extra.in index ecfaadf646..0eb53c0151 100644 --- a/requirements/testing-extra.in +++ b/requirements/testing-extra.in @@ -1,2 +1,6 @@ +# used when generate devtools docs example +ansi2html devtools hypothesis +# used in docs tests +sqlalchemy==1.4.0 diff --git a/requirements/testing-extra.txt b/requirements/testing-extra.txt index 8a2fe3b10c..1a54e51172 100644 --- a/requirements/testing-extra.txt +++ b/requirements/testing-extra.txt @@ -1,9 +1,11 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --output-file=requirements/testing-extra.txt --resolver=backtracking requirements/testing-extra.in # +ansi2html==1.8.0 + # via -r requirements/testing-extra.in asttokens==2.2.1 # via devtools attrs==22.2.0 @@ -14,9 +16,13 @@ exceptiongroup==1.1.1 # via hypothesis executing==1.2.0 # via devtools +greenlet==2.0.2 + # via sqlalchemy hypothesis==6.70.0 # via -r requirements/testing-extra.in six==1.16.0 # via asttokens sortedcontainers==2.4.0 # via hypothesis +sqlalchemy==1.4.0 + # via -r requirements/testing-extra.in diff --git a/requirements/testing.in b/requirements/testing.in index db3be4c311..b47b2478a6 100644 --- a/requirements/testing.in +++ b/requirements/testing.in @@ -3,3 +3,4 @@ dirty-equals pytest pytest-mock pytest-pretty +pytest-examples diff --git a/requirements/testing.txt b/requirements/testing.txt index ecec30e131..e077ad3881 100644 --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -1,11 +1,15 @@ # -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: # # pip-compile --output-file=requirements/testing.txt --resolver=backtracking requirements/testing.in # attrs==22.2.0 # via pytest +black==23.1.0 + # via pytest-examples +click==8.1.3 + # via black coverage[toml]==7.2.2 # via -r requirements/testing.in dirty-equals==0.5.0 @@ -18,8 +22,16 @@ markdown-it-py==2.2.0 # via rich mdurl==0.1.2 # via markdown-it-py +mypy-extensions==1.0.0 + # via black packaging==23.0 - # via pytest + # via + # black + # pytest +pathspec==0.11.1 + # via black +platformdirs==3.1.1 + # via black pluggy==1.0.0 # via pytest pygments==2.14.0 @@ -27,8 +39,11 @@ pygments==2.14.0 pytest==7.2.2 # via # -r requirements/testing.in + # pytest-examples # pytest-mock # pytest-pretty +pytest-examples==0.0.8 + # via -r requirements/testing.in pytest-mock==3.10.0 # via -r requirements/testing.in pytest-pretty==1.1.1 @@ -37,8 +52,11 @@ pytz==2022.7.1 # via dirty-equals rich==13.3.2 # via pytest-pretty +ruff==0.0.259 + # via pytest-examples tomli==2.0.1 # via + # black # coverage # pytest typing-extensions==4.5.0 diff --git a/tests/test_discriminated_union.py b/tests/test_discriminated_union.py index c110bcf1de..4b07fd6420 100644 --- a/tests/test_discriminated_union.py +++ b/tests/test_discriminated_union.py @@ -101,6 +101,7 @@ class Dog(BaseModel): ] +@pytest.mark.xfail(reason='something weird with from_attributes being included in CoreConfig') def test_discriminated_union_validation(): class BlackCat(BaseModel): pet_type: Literal['cat'] @@ -279,6 +280,7 @@ class Model(BaseModel): assert isinstance(m.pet, WhiteCat) +@pytest.mark.xfail(reason='something weird with from_attributes being included in CoreConfig') def test_discriminated_union_basemodel_instance_value(): class A(BaseModel): foo: Literal['a'] @@ -293,6 +295,7 @@ class Top(BaseModel): assert isinstance(t, Top) +@pytest.mark.xfail(reason='something weird with from_attributes being included in CoreConfig') def test_discriminated_union_basemodel_instance_value_with_alias(): class A(BaseModel): literal: Literal['a'] = Field(alias='lit') diff --git a/tests/test_docs.py b/tests/test_docs.py new file mode 100644 index 0000000000..c599af2032 --- /dev/null +++ b/tests/test_docs.py @@ -0,0 +1,178 @@ +from __future__ import annotations as _annotations + +import os +import platform +import re +import subprocess +import sys +from datetime import datetime +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from pytest_examples import CodeExample, EvalExample, find_examples + +index_main = None + + +def skip_docs_tests(): + if sys.platform not in {'linux', 'darwin'}: + return 'not in linux or macos' + + if platform.python_implementation() != 'CPython': + return 'not cpython' + + try: + import hypothesis # noqa: F401 + except ImportError: + return 'hypothesis not installed' + + try: + import devtools # noqa: F401 + except ImportError: + return 'devtools not installed' + + try: + import sqlalchemy # noqa: F401 + except ImportError: + return 'sqlalchemy not installed' + + try: + import ansi2html # noqa: F401 + except ImportError: + return 'ansi2html not installed' + + +class GroupModuleGlobals: + def __init__(self) -> None: + self.name = None + self.module_dict: dict[str, str] = {} + + def get(self, name: str | None): + if name is not None and name == self.name: + return self.module_dict + + def set(self, name: str | None, module_dict: dict[str, str]): + self.name = name + if self.name is None: + self.module_dict = None + else: + self.module_dict = module_dict + + +group_globals = GroupModuleGlobals() + + +class MockedDatetime(datetime): + @classmethod + def now(cls, *args, **kwargs): + return datetime(2032, 1, 2, 3, 4, 5, 6) + + +skip_reason = skip_docs_tests() + + +@pytest.mark.skipif(bool(skip_reason), reason=skip_reason or 'not skipping') +@pytest.mark.parametrize('example', find_examples('docs', skip=sys.platform == 'win32'), ids=str) +def test_docs_examples(example: CodeExample, eval_example: EvalExample, tmp_path: Path, mocker): # noqa: C901 + global index_main + if example.path.name == 'index.md': + if index_main is None: + index_main = example.source + else: + (tmp_path / 'index_main.py').write_text(index_main) + sys.path.append(str(tmp_path)) + + if example.path.name == 'devtools.md': + pytest.skip('tested below') + + prefix_settings = example.prefix_settings() + test_settings = prefix_settings.get('test') + lint_settings = prefix_settings.get('lint') + if test_settings == 'skip' and lint_settings == 'skip': + pytest.skip('both test and lint skipped') + + requires_settings = prefix_settings.get('requires') + if requires_settings: + major, minor = map(int, requires_settings.split('.')) + if sys.version_info < (major, minor): + pytest.skip(f'requires python {requires_settings}') + + group_name = prefix_settings.get('group') + + if '# ignore-above' in example.source: + eval_example.set_config(ruff_ignore=['E402']) + if group_name: + eval_example.set_config(ruff_ignore=['F821']) + + if lint_settings != 'skip': + if eval_example.update_examples: + eval_example.format(example) + else: + eval_example.lint(example) + + if test_settings == 'skip': + return + + group_name = prefix_settings.get('group') + d = group_globals.get(group_name) + + mocker.patch('datetime.datetime', MockedDatetime) + mocker.patch('random.randint', return_value=3) + os.environ['TZ'] = 'UTC' + + xfail = None + if test_settings and test_settings.startswith('xfail'): + xfail = test_settings[5:] + + try: + if test_settings == 'no-print-intercept': + d2 = eval_example.run(example, module_globals=d) + elif eval_example.update_examples: + d2 = eval_example.run_print_update(example, module_globals=d) + else: + d2 = eval_example.run_print_check(example, module_globals=d) + except Exception as e: + if xfail: + pytest.xfail(str(e)) + raise + else: + if xfail: + pytest.fail('expected xfail') + group_globals.set(group_name, d2) + + +@pytest.mark.skipif(bool(skip_reason), reason=skip_reason or 'not skipping') +@pytest.mark.parametrize('example', find_examples('docs/usage/devtools.md', skip=sys.platform == 'win32'), ids=str) +def test_docs_devtools_example(example: CodeExample, eval_example: EvalExample, tmp_path: Path): + from ansi2html import Ansi2HTMLConverter + + if eval_example.update_examples: + eval_example.format(example) + else: + eval_example.lint(example) + + with NamedTemporaryFile(mode='w', suffix='.py') as f: + f.write(example.source) + f.flush() + os.environ['PY_DEVTOOLS_HIGHLIGHT'] = 'true' + p = subprocess.run((sys.executable, f.name), stdout=subprocess.PIPE, check=True, encoding='utf8') + + conv = Ansi2HTMLConverter() + + # replace ugly file path with "devtools_example.py" + output = re.sub(r'/.+?\.py', 'devtools_example.py', p.stdout) + output_html = conv.convert(output, full=False) + output_html = ( + '\n' + f'{output_html}' + ) + output_file = Path('docs/plugins/devtools_output.html') + + if eval_example.update_examples: + output_file.write_text(output_html) + elif not output_file.exists(): + pytest.fail(f'output file {output_file} does not exist') + else: + assert output_html == output_file.read_text() diff --git a/tests/test_orm_mode.py b/tests/test_orm_mode.py index 0f4edd4a51..55956863fd 100644 --- a/tests/test_orm_mode.py +++ b/tests/test_orm_mode.py @@ -85,7 +85,6 @@ class PokemonDict(BaseModel): } -@pytest.mark.xfail(reason='working on V2') def test_from_attributes(): class PetCls: def __init__(self, *, name: str, species: str): @@ -132,7 +131,6 @@ class Pet(BaseModel): Pet.from_orm(None) -@pytest.mark.xfail(reason='working on V2') def test_object_with_getattr(): class FooGetAttr: def __getattr__(self, key: str): @@ -160,7 +158,6 @@ class ModelInvalid(BaseModel): ModelInvalid.from_orm(foo) -@pytest.mark.xfail(reason='working on V2') def test_properties(): class XyProperty: x = 4 @@ -290,7 +287,6 @@ class ModelB(Model): assert ModelB.from_orm(obj) == ModelB(b=ModelA(a=1)) -@pytest.mark.xfail(reason='working on V2') def test_nested_orm(): class User(BaseModel): model_config = ConfigDict(from_attributes=True)