diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f9f656b7ca..d6f6125370 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -155,9 +155,9 @@ jobs: CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-with-deps - name: store coverage files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage + name: coverage-${{ matrix.os }}-${{ matrix.python-version }} path: coverage test-fastapi: @@ -169,7 +169,7 @@ jobs: - uses: actions/checkout@v4 - name: set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.10' @@ -270,9 +270,9 @@ jobs: CONTEXT: linux-py${{ matrix.python-version }}-pyright - name: store coverage files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage + name: coverage-${{ matrix.os }}-${{ matrix.python-version }}-mypy${{ matrix.mypy-version }} path: coverage coverage-combine: @@ -282,14 +282,15 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.8' - name: get coverage files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage + merge-multiple: true + pattern: coverage-* path: coverage - run: pip install coverage[toml] @@ -300,7 +301,7 @@ jobs: - run: coverage html --show-contexts --title "pydantic coverage for ${{ github.sha }}" - name: Store coverage html - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: coverage-html path: htmlcov @@ -382,7 +383,7 @@ jobs: - uses: actions/checkout@v4 - name: set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.10' @@ -408,7 +409,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.10' - name: Install dependencies diff --git a/.github/workflows/dependencies-check.yml b/.github/workflows/dependencies-check.yml index 402260f440..d8db57827f 100644 --- a/.github/workflows/dependencies-check.yml +++ b/.github/workflows/dependencies-check.yml @@ -37,7 +37,7 @@ jobs: - uses: actions/checkout@v4 - name: set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/upload-previews.yml b/.github/workflows/upload-previews.yml index 5ada8d18f8..2fd585162c 100644 --- a/.github/workflows/upload-previews.yml +++ b/.github/workflows/upload-previews.yml @@ -14,13 +14,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.10' - run: pip install smokeshow - - uses: dawidd6/action-download-artifact@v2 + - uses: dawidd6/action-download-artifact@v3 with: workflow: ci.yml commit: ${{ github.event.workflow_run.head_sha }} diff --git a/docs/blog/pydantic-v2-alpha.md b/docs/blog/pydantic-v2-alpha.md deleted file mode 100644 index c0720f5961..0000000000 --- a/docs/blog/pydantic-v2-alpha.md +++ /dev/null @@ -1,200 +0,0 @@ -# Pydantic V2 Pre Release - - - ---- - -We're excited to announce the first alpha release of Pydantic V2! - -This first Pydantic V2 alpha is no April Fool's joke — for a start we missed our April 1st target date :cry:. -After a year's work, we invite you to explore the improvements we've made and give us your feedback. -We look forward to hearing your thoughts and working together to improve the library. 
- -For many of you, Pydantic is already a key part of your Python toolkit and needs no introduction — -we hope you'll find the improvements and additions in Pydantic V2 useful. - -If you're new to Pydantic: Pydantic is an open-source Python library that provides powerful data parsing and validation — -including type coercion and useful error messages when typing issues arise — and settings management capabilities. -See [the docs](../index.md) for examples of Pydantic at work. - -## Getting started with the Pydantic V2 alpha - -Your feedback will be a critical part of ensuring that we have made the right tradeoffs with the API changes in V2. - -To get started with the Pydantic V2 alpha, install it from PyPI. -We recommend using a virtual environment to isolate your testing environment: - -```bash -pip install --pre -U "pydantic>=2.0a1" -``` - -Note that there are still some rough edges and incomplete features, and while trying out the Pydantic V2 alpha releases you may experience errors. -We encourage you to try out the alpha releases in a test environment and not in production. -Some features are still in development, and we will continue to make changes to the API. - -If you do encounter any issues, please [create an issue in GitHub](https://github.com/pydantic/pydantic/issues) using the `bug V2` label. -This will help us to actively monitor and track errors, and to continue to improve the library’s performance. - -This will be the first of several upcoming alpha releases. As you evaluate our changes and enhancements, -we encourage you to share your feedback with us. - -Please let us know: - -* If you don't like the changes, so we can make sure Pydantic remains a library you enjoy using. -* If this breaks your usage of Pydantic so we can fix it, or at least describe a migration path. - -Thank you for your support, and we look forward to your feedback. - ---- - -## Headlines - -Here are some of the most interesting new features in the current Pydantic V2 alpha release. -For background on plans behind these features, see the earlier [Pydantic V2 Plan](pydantic-v2.md) blog post. - -The biggest change to Pydantic V2 is [`pydantic-core`](https://github.com/pydantic/pydantic-core) — -all validation logic has been rewritten in Rust and moved to a separate package, `pydantic-core`. -This has a number of big advantages: - -* **Performance** - Pydantic V2 is 5-50x faster than Pydantic V1. -* **Safety & maintainability** - We've made changes to the architecture that we think will help us maintain Pydantic V2 with far fewer bugs in the long term. - -With the use of `pydantic-core`, the majority of the logic in the Pydantic library is dedicated to generating -"pydantic core schema" — the schema used define the behaviour of the new, high-performance `pydantic-core` validators and serializers. - -### Ready for experimentation - -* **BaseModel** - the core of validation in Pydantic V1 remains, albeit with new method names. -* **Dataclasses** - Pydantic dataclasses are improved and ready to test. -* **Serialization** - dumping/serialization/marshalling is significantly more flexible, and ready to test. -* **Strict mode** - one of the biggest additions in Pydantic V2 is strict mode, which is ready to test. -* **JSON Schema** - generation of JSON Schema is much improved and ready to test. -* **Generic Models** - are much improved and ready to test. -* **Recursive Models** - and validation of recursive data structures is much improved and ready to test. 
-* **Custom Types** - custom types have a new interface and are ready to test. -* **Custom Field Modifiers** - used via `Annotated[]` are working and in use in Pydantic itself. -* **Validation without a BaseModel** - the new `TypeAdapter` class allows validation without the need for a `BaseModel` class, and it's ready to test. -* **TypedDict** - we now have full support for `TypedDict` via `TypeAdapter`, it's ready to test. - -### Still under construction - -* **Documentation** - we're working hard on full documentation for V2, but it's not ready yet. -* **Conversion Table** - a big addition to the documentation will be a conversion table showing how types are coerced, this is a WIP. -* **BaseSettings** - `BaseSettings` will move to a separate `pydantic-settings` package, it's not yet ready to test. - **Notice:** since `pydantic-settings` is not yet ready to release, there's no support for `BaseSettings` in the first alpha release. -* **validate_arguments** - the `validate_arguments` decorator remains and is working, but hasn't been updated yet. -* **Hypothesis Plugin** - the Hypothesis plugin is yet to be updated. -* **computed fields** - we know a lot of people are waiting for this, we will include it in Pydantic V2. -* **Error messages** - could use some love, and links to docs in error messages are still to be added. -* **Migration Guide** - we have some pointers below, but this needs completing. - -## Migration Guide - -**Please note:** this is just the beginning of a migration guide. We'll work hard up to the final release to prepare -a full migration guide, but for now the following pointers should be some help while experimenting with V2. - -### Changes to BaseModel - -* Various method names have been changed; `BaseModel` methods all start with `model_` now. - Where possible, we have retained the old method names to help ease migration, but calling them will result in `DeprecationWarning`s. - * Some of the built-in data loading functionality has been slated for removal. - In particular, `parse_raw` and `parse_file` are now deprecated. You should load the data and then pass it to `model_validate`. -* The `from_orm` method has been removed; you can now just use `model_validate` (equivalent to `parse_obj` from Pydantic V1) to achieve something similar, - as long as you've set `from_attributes=True` in the model config. -* The `__eq__` method has changed for models; models are no longer considered equal to the dicts. -* Custom `__init__` overrides won't be called. This should be replaced with a `@root_validator`. -* Due to inconsistency with the rest of the library, we have removed the special behavior of models - using the `__root__` field, and have disallowed the use of an attribute with this name to prevent confusion. - However, you can achieve equivalent behavior with a "standard" field name through the use of `@root_validator`, - `@model_serializer`, and `__pydantic_modify_json_schema__`. You can see an example of this - [here](https://github.com/pydantic/pydantic/blob/2b9459f20d094a46fa3093b43c34444240f03646/tests/test_parse.py#L95-L113). - -### Changes to Pydantic Dataclasses - -* The `__post_init__` in Pydantic dataclasses will now be called after validation, rather than before. -* We no longer support `extra='allow'` for Pydantic dataclasses, where extra attributes passed to the initializer would be - stored as extra fields on the dataclass. `extra='ignore'` is still supported for the purposes of allowing extra fields while parsing data; they just aren't stored. 
-* `__post_init_post_parse__` has been removed. -* Nested dataclasses no longer accept tuples as input, only dict. - -### Changes to Config - -* To specify config on a model, it is now deprecated to create a class called `Config` in the namespace of the parent `BaseModel` subclass. - Instead, you just need to set a class attribute called `model_config` to be a dict with the key/value pairs you want to be used as the config. - -The following config settings have been removed: - -* `allow_mutation` — this has been removed. You should be able to use [frozen](../api/config.md#pydantic.config.ConfigDict) equivalently (inverse of current use). -* `error_msg_templates`. -* `fields` — this was the source of various bugs, so has been removed. You should be able to use `Annotated` on fields to modify them as desired. -* `getter_dict` — `orm_mode` has been removed, and this implementation detail is no longer necessary. -* `schema_extra` — you should now use the `json_schema_extra` keyword argument to `pydantic.Field`. -* `smart_union`. -* `underscore_attrs_are_private` — the Pydantic V2 behavior is now the same as if this was always set to `True` in Pydantic V1. - -The following config settings have been renamed: - -* `allow_population_by_field_name` → `populate_by_name` -* `anystr_lower` → `str_to_lower` -* `anystr_strip_whitespace` → `str_strip_whitespace` -* `anystr_upper` → `str_to_upper` -* `keep_untouched` → `ignored_types` -* `max_anystr_length` → `str_max_length` -* `min_anystr_length` → `str_min_length` -* `orm_mode` → `from_attributes` -* `validate_all` → `validate_default` - -### Changes to Validators - -* Raising a `TypeError` inside a validator no longer produces a `ValidationError`, but just raises the `TypeError` directly. - This was necessary to prevent certain common bugs (such as calling functions with invalid signatures) from - being unintentionally converted into `ValidationError` and displayed to users. - If you really want `TypeError` to be converted to a `ValidationError` you should use a `try: except:` block that will catch it and do the conversion. -* `each_item` validators are deprecated and should be replaced with a type annotation using `Annotated` to apply a validator - or with a validator that operates on all items at the top level. -* Changes to `@validator`-decorated function signatures. -* The `stricturl` type has been removed. -* Root validators can no longer be run with `skip_on_failure=False`. - -### Changes to Validation of specific types - -* Integers outside the valid range of 64 bit integers will cause `ValidationError`s during parsing. - To work around this, use an `IsInstance` validator (more details to come). -* Subclasses of built-ins won't validate into their subclass types; you'll need to use an `IsInstance` validator to validate these types. - -### Changes to Generic models - -* While it does not raise an error at runtime yet, subclass checks for parametrized generics should no longer be used. - These will result in `TypeError`s and we can't promise they will work forever. However, it will be okay to do subclass checks against _non-parametrized_ generic models - -### Other changes - -* `GetterDict` has been removed, as it was just an implementation detail for `orm_mode`, which has been removed. - -### TypeAdapter - -Pydantic V1 didn't have good support for validation or serializing non-`BaseModel`. -To work with them you had to create a "root" model or use the utility functions in `pydantic.tools` (`parse_obj_as` and `schema_of`). 
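-
-For reference, the V1 approach looked roughly like this (a minimal sketch of the `pydantic.tools` helpers being replaced):
-
-```python
-from typing import List
-
-from pydantic.tools import parse_obj_as, schema_of
-
-# Pydantic V1: one-off helper functions rather than a reusable validator object
-assert parse_obj_as(List[int], ['1', '2', '3']) == [1, 2, 3]
-json_schema = schema_of(List[int])  # a JSON Schema dict for List[int]
-```
-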
-In Pydantic V2 this is _a lot_ easier: the `TypeAdapter` class lets you build an object that behaves almost like a `BaseModel` class which you can use for a lot of the use cases of root models and as a complete replacement for `parse_obj_as` and `schema_of`. - -```python -from typing import List - -from pydantic import TypeAdapter - -validator = TypeAdapter(List[int]) -assert validator.validate_python(['1', '2', '3']) == [1, 2, 3] -print(validator.json_schema()) -#> {'items': {'type': 'integer'}, 'type': 'array'} -``` - -Note that this API is provisional and may change before the final release of Pydantic V2. diff --git a/docs/blog/pydantic-v2-final.md b/docs/blog/pydantic-v2-final.md deleted file mode 100644 index cdb2c9f0db..0000000000 --- a/docs/blog/pydantic-v2-final.md +++ /dev/null @@ -1,40 +0,0 @@ -# Pydantic V2 Is Here! - - - ---- - -The last few months have involved a whirlwind of work, and we're finally ready to announce to official release of -Pydantic V2! - -## Getting started with Pydantic V2 - -To get started with Pydantic V2, install it from PyPI: - -```bash -pip install -U pydantic -``` - -Pydantic V2 is compatible with Python 3.7 and above. - -See [the docs](../index.md) for examples of Pydantic at work. - -## Migration guide - -If you are upgrading an existing project, you can use our extensive [migration guide](../migration.md) to understand -what has changed. - -If you do encounter any issues, please [create an issue in GitHub](https://github.com/pydantic/pydantic/issues/new?assignees=&labels=bug+V2%2Cunconfirmed&projects=&template=bug-v2.yml) -using the `bug V2` label. -This will help us to actively monitor and track errors, and to continue to improve the library’s performance. - -Thank you for your support, and we look forward to your feedback. diff --git a/docs/blog/pydantic-v2.md b/docs/blog/pydantic-v2.md deleted file mode 100644 index a979168dcc..0000000000 --- a/docs/blog/pydantic-v2.md +++ /dev/null @@ -1,953 +0,0 @@ -# Pydantic V2 Plan - - - ---- - -Updated late 10 Jul 2022, see [pydantic#4226](https://github.com/pydantic/pydantic/pull/4226). - ---- - -I've spoken to quite a few people about pydantic V2, and mention it in passing even more. - -I owe people a proper explanation of the plan for V2: - -* What we will add -* What we will remove -* What we will change -* How I'm intending to go about completing it and getting it released -* Some idea of timeframe :fearful: - -Here goes... - ---- - -Enormous thanks to -[Eric Jolibois](https://github.com/PrettyWood), [Laurence Watson](https://github.com/Rabscuttler), -[Sebastián Ramírez](https://github.com/tiangolo), [Adrian Garcia Badaracco](https://github.com/adriangb), -[Tom Hamilton Stubber](https://github.com/tomhamiltonstubber), [Zac Hatfield-Dodds](https://github.com/Zac-HD), -[Tom](https://github.com/czotomo) & [Hasan Ramezani](https://github.com/hramezani) -for reviewing this blog post, putting up with (and correcting) my horrible typos and making great suggestions -that have made this post and Pydantic V2 materially better. - ---- - -## Plan & Timeframe - -I'm currently taking a kind of sabbatical after leaving my last job to get pydantic V2 released. -Why? I ask myself that question quite often. -I'm very proud of how much pydantic is used, but I'm less proud of its internals. -Since it's something people seem to care about and use quite a lot -(26m downloads a month, used by 72k public repos, 10k stars). -I want it to be as good as possible. 
- -While I'm on the subject of why, how and my odd sabbatical: if you work for a large company who use pydantic a lot, -you might encourage the company to **sponsor me a meaningful amount**, -like [Salesforce did](https://twitter.com/samuel_colvin/status/1501288247670063104) -(if your organisation is not open to donations, I can also offer consulting services). -This is not charity, recruitment or marketing - the argument should be about how much the company will save if -pydantic is 10x faster, more stable and more powerful - it would be worth paying me 10% of that to make it happen. - -Before pydantic V2 can be released, we need to release pydantic V1.10 - there are lots of changes in the main -branch of pydantic contributed by the community, it's only fair to provide a release including those changes, -many of them will remain unchanged for V2, the rest will act as a requirement to make sure pydantic V2 includes -the capabilities they implemented. - -The basic road map for me is as follows: - -1. Implement a few more features in pydantic-core, and release a first version, see [below](#motivation-pydantic-core) -2. Work on getting pydantic V1.10 out - basically merge all open PRs that are finished -3. Release pydantic V1.10 -4. Delete all stale PRs which didn't make it into V1.10, apologise profusely to their authors who put their valuable - time into pydantic only to have their PRs closed :pray: - (and explain when and how they can rebase and recreate the PR) -5. Rename `master` to `main`, seems like a good time to do this -6. Change the main branch of pydantic to target V2 -7. Start tearing pydantic code apart and see how many existing tests can be made to pass -8. Rinse, repeat -9. Release pydantic V2 :tada: - -Plan is to have all this done by the end of October, definitely by the end of the year. - -### Breaking Changes & Compatibility :pray: - -While we'll do our best to avoid breaking changes, some things will break. - -As per the [greatest pun in modern TV history](https://youtu.be/ezAlySFluEk). - -> You can't make a Tomelette without breaking some Greggs. - -Where possible, if breaking changes are unavoidable, we'll try to provide warnings or errors to make sure those -changes are obvious to developers. - -## Motivation & `pydantic-core` - -Since pydantic's initial release, with the help of wonderful contributors -[Eric Jolibois](https://github.com/PrettyWood), -[Sebastián Ramírez](https://github.com/tiangolo), -[David Montague](https://github.com/dmontagu) and many others, the package and its usage have grown enormously. -The core logic however has remained mostly unchanged since the initial experiment. -It's old, it smells, it needs to be rebuilt. - -The release of version 2 is an opportunity to rebuild pydantic and correct many things that don't make sense - -**to make pydantic amazing :rocket:**. - -The core validation logic of pydantic V2 will be performed by a separate package -[pydantic-core](https://github.com/pydantic/pydantic-core) which I've been building over the last few months. -*pydantic-core* is written in Rust using the excellent [pyo3](https://pyo3.rs) library which provides rust bindings -for python. - -The motivation for building pydantic-core in Rust is as follows: - -1. **Performance**, see [below](#performance) -2. 
**Recursion and code separation** - with no stack and little-to-no overhead for extra function calls, - Rust allows pydantic-core to be implemented as a tree of small validators which call each other, - making code easier to understand and extend without harming performance -4. **Safety and complexity** - pydantic-core is a fairly complex piece of code which has to draw distinctions - between many different errors, Rust is great in situations like this, - it should minimise bugs (:fingers_crossed:) and allow the codebase to be extended for a long time to come - -!!! note - The python interface to pydantic shouldn't change as a result of using pydantic-core, instead - pydantic will use type annotations to build a schema for pydantic-core to use. - -pydantic-core is usable now, albeit with an unintuitive API, if you're interested, please give it a try. - -pydantic-core provides validators for common data types, -[see a list here](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/schema_types.py#L314). -Other, less commonly used data types will be supported via validator functions implemented in pydantic, in Python. - -See [pydantic-core#153](https://github.com/pydantic/pydantic-core/issues/153) -for a summary of what needs to be completed before its first release. - -## Headlines - -Here are some of the biggest changes expected in V2. - -### Performance :thumbsup: - -As a result of the move to Rust for the validation logic -(and significant improvements in how validation objects are structured) pydantic V2 will be significantly faster -than pydantic V1. - -Looking at the pydantic-core [benchmarks](https://github.com/pydantic/pydantic-core/tree/main/tests/benchmarks) -today, pydantic V2 is between 4x and 50x faster than pydantic V1.9.1. - -In general, pydantic V2 is about 17x faster than V1 when validating a model containing a range of common fields. - -### Strict Mode :thumbsup: - -People have long complained about pydantic for coercing data instead of throwing an error. -E.g. input to an `int` field could be `123` or the string `"123"` which would be converted to `123` -While this is very useful in many scenarios (think: URL parameters, environment variables, user input), -there are some situations where it's not desirable. - -pydantic-core comes with "strict mode" built in. With this, only the exact data type is allowed, e.g. passing -`"123"` to an `int` field would result in a validation error. - -This will allow pydantic V2 to offer a `strict` switch which can be set on either a model or a field. - -### Formalised Conversion Table :thumbsup: - -As well as complaints about coercion, another legitimate complaint was inconsistency around data conversion. - -In pydantic V2, the following principle will govern when data should be converted in "lax mode" (`strict=False`): - -> If the input data has a SINGLE and INTUITIVE representation, in the field's type, AND no data is lost -> during the conversion, then the data will be converted; otherwise a validation error is raised. -> There is one exception to this rule: string fields - -> virtually all data has an intuitive representation as a string (e.g. `repr()` and `str()`), therefore -> a custom rule is required: only `str`, `bytes` and `bytearray` are valid as inputs to string fields. - -Some examples of what that means in practice: - -| Field Type | Input | Single & Intuitive R. 
| All Data Preserved | Result | -|------------|-------------------------|-----------------------|--------------------|---------| -| `int` | `"123"` | :material-check: | :material-check: | Convert | -| `int` | `123.0` | :material-check: | :material-check: | Convert | -| `int` | `123.1` | :material-check: | :material-close: | Error | -| `date` | `"2020-01-01"` | :material-check: | :material-check: | Convert | -| `date` | `"2020-01-01T00:00:00"` | :material-check: | :material-check: | Convert | -| `date` | `"2020-01-01T12:00:00"` | :material-check: | :material-close: | Error | -| `int` | `b"1"` | :material-close: | :material-check: | Error | - -(For the last case converting `bytes` to an `int` could reasonably mean `int(bytes_data.decode())` or -`int.from_bytes(b'1', 'big/little')`, hence an error) - -In addition to the general rule, we'll provide a conversion table which defines exactly what data will be allowed -to which field types. See [the table below](#conversion-table) for a start on this. - -### Built in JSON support :thumbsup: - -pydantic-core can parse JSON directly into a model or output type, this both improves performance and avoids -issue with strictness - e.g. if you have a strict model with a `datetime` field, the input must be a -`datetime` object, but clearly that makes no sense when parsing JSON which has no `datatime` type. -Same with `bytes` and many other types. - -Pydantic V2 will therefore allow some conversion when validating JSON directly, even in strict mode -(e.g. `ISO8601 string -> datetime`, `str -> bytes`) even though this would not be allowed when validating -a python object. - -In future direct validation of JSON will also allow: - -* parsing in a separate thread while starting validation in the main thread -* line numbers from JSON to be included in the validation errors - -(These features will not be included in V2, but instead will hopefully be added later.) - -!!! note - Pydantic has always had special support for JSON, that is not going to change. - - While in theory other formats could be specifically supported, the overheads and development time are - significant and I don't think there's another format that's - used widely enough to be worth specific logic. Other formats can be parsed to python then validated, similarly - when serializing, data can be exported to a python object, then serialized, - see [below](#improvements-to-dumpingserializationexport). - -### Validation without a Model :thumbsup: - -In pydantic V1 the core of all validation was a pydantic model, this led to a significant performance penalty -and extra complexity when the output data type was not a model. - -pydantic-core operates on a tree of validators with no "model" type required at the base of that tree. -It can therefore validate a single `string` or `datetime` value, a `TypedDict` or a `Model` equally easily. - -This feature will provide significant addition performance improvements in scenarios like: - -* Adding validation to `dataclasses` -* Validating URL arguments, query strings, headers, etc. in FastAPI -* Adding validation to `TypedDict` -* Function argument validation -* Adding validation to your custom classes, decorators... - -In effect - anywhere where you don't care about a traditional model class instance. - -We'll need to add standalone methods for generating JSON Schema and dumping these objects to JSON, etc. - -### Required vs. Nullable Cleanup :thumbsup: - -Pydantic previously had a somewhat confused idea about "required" vs. "nullable". 
This mostly resulted from -my misgivings about marking a field as `Optional[int]` but requiring a value to be provided but allowing it to be -`None` - I didn't like using the word "optional" in relation to a field which was not optional. - -In pydantic V2, pydantic will move to match dataclasses, thus: - -```py title="Required vs. Nullable" test="skip" lint="skip" upgrade="skip" -from pydantic import BaseModel - - -class Foo(BaseModel): - f1: str # required, cannot be None - f2: str | None # required, can be None - same as Optional[str] / Union[str, None] - f3: str | None = None # not required, can be None - f4: str = 'Foobar' # not required, but cannot be None -``` - -### Validator Function Improvements :thumbsup: :thumbsup: :thumbsup: - -This is one of the changes in pydantic V2 that I'm most excited about, I've been talking about something -like this for a long time, see [pydantic#1984](https://github.com/pydantic/pydantic/issues/1984), but couldn't -find a way to do this until now. - -Fields which use a function for validation can be any of the following types: - -* **function before mode** - where the function is called before the inner validator is called -* **function after mode** - where the function is called after the inner validator is called -* **plain mode** - where there's no inner validator -* **wrap mode** - where the function takes a reference to a function which calls the inner validator, - and can therefore modify the input before inner validation, modify the output after inner validation, conditionally - not call the inner validator or catch errors from the inner validator and return a default value, or change the error - -An example how a wrap validator might look: - -```py title="Wrap mode validator function" test="skip" lint="skip" upgrade="skip" -from datetime import datetime -from pydantic import BaseModel, ValidationError, validator - - -class MyModel(BaseModel): - timestamp: datetime - - @validator('timestamp', mode='wrap') - def validate_timestamp(cls, v, handler): - if v == 'now': - # we don't want to bother with further validation, - # just return the new value - return datetime.now() - try: - return handler(v) - except ValidationError: - # validation failed, in this case we want to - # return a default value - return datetime(2000, 1, 1) -``` - -As well as being powerful, this provides a great "escape hatch" when pydantic validation doesn't do what you need. - -### More powerful alias(es) :thumbsup: - -pydantic-core can support alias "paths" as well as simple string aliases to flatten data as it's validated. - -Best demonstrated with an example: - -```py title="Alias paths" test="skip" lint="skip" upgrade="skip" -from pydantic import BaseModel, Field - - -class Foo(BaseModel): - bar: str = Field(aliases=[['baz', 2, 'qux']]) - - -data = { - 'baz': [ - {'qux': 'a'}, - {'qux': 'b'}, - {'qux': 'c'}, - {'qux': 'd'}, - ] -} - -foo = Foo(**data) -assert foo.bar == 'c' -``` - -`aliases` is a list of lists because multiple paths can be provided, if so they're tried in turn until a value is found. - -Tagged unions will use the same logic as `aliases` meaning nested attributes can be used to select a schema -to validate against. - -### Improvements to Dumping/Serialization/Export :thumbsup: :confused: - -(I haven't worked on this yet, so these ideas are only provisional) - -There has long been a debate about how to handle converting data when extracting it from a model. 
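-
-To make the tension concrete, here is a small, illustrative V1-style sketch: `.dict()` keeps rich Python values such as
-`datetime`, while `.json()` only converts them on the way to a JSON string.
-
-```py title="V1 export behaviour" test="skip" lint="skip" upgrade="skip"
-from datetime import datetime
-
-from pydantic import BaseModel
-
-
-class Event(BaseModel):
-    when: datetime
-
-
-event = Event(when=datetime(2022, 7, 10))
-print(event.dict())  # {'when': datetime.datetime(2022, 7, 10, 0, 0)} - not JSON compliant
-print(event.json())  # '{"when": "2022-07-10T00:00:00"}' - JSON compliant, but already a string
-```
-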
-One of the features people have long requested is the ability to convert data to JSON compliant types while -converting a model to a dict. - -My plan is to move data export into pydantic-core, with that, one implementation can support all export modes without -compromising (and hopefully significantly improving) performance. - -I see four different export/serialization scenarios: - -1. Extracting the field values of a model with no conversion, effectively `model.__dict__` but with the current filtering - logic provided by `.dict()` -2. Extracting the field values of a model recursively (effectively what `.dict()` does now) - sub-models are converted to - dicts, but other fields remain unchanged. -3. Extracting data and converting at the same time (e.g. to JSON compliant types) -4. Serializing data straight to JSON - -I think all 4 modes can be supported in a single implementation, with a kind of "3.5" mode where a python function -is used to convert the data as the user wishes. - -The current `include` and `exclude` logic is extremely complicated, but hopefully it won't be too hard to -translate it to Rust. - -We should also add support for `validate_alias` and `dump_alias` as well as the standard `alias` -to allow for customising field keys. - -### Validation Context :thumbsup: - -Pydantic V2 will add a new optional `context` argument to `model_validate` and `model_validate_json` -which will allow you to pass information not available when creating a model to validators. -See [pydantic#1549](https://github.com/pydantic/pydantic/issues/1549) for motivation. - -Here's an example of `context` might be used: - -```py title="Context during Validation" test="skip" lint="skip" upgrade="skip" -from pydantic import BaseModel, EmailStr, validator - - -class User(BaseModel): - email: EmailStr - home_country: str - - @validator('home_country') - def check_home_country(cls, v, context): - if v not in context['countries']: - raise ValueError('invalid country choice') - return v - - -async def add_user(post_data: bytes): - countries = set(await db_connection.fetch_all('select code from country')) - user = User.model_validate_json(post_data, context={'countries': countries}) - ... -``` - -!!! note - We (actually mostly Sebastián :wink:) will have to make some changes to FastAPI to fully leverage `context` - as we'd need some kind of dependency injection to build context before validation so models can still be passed as - arguments to views. I'm sure he'll be game. - -!!! warning - Although this will make it slightly easier to run synchronous IO (HTTP requests, DB. queries, etc.) - from within validators, I strongly advise you keep IO separate from validation - do it before and use context, - do it afterwards, avoid where possible making queries inside validation. - -### Model Namespace Cleanup :thumbsup: - -For years I've wanted to clean up the model namespace, -see [pydantic#1001](https://github.com/pydantic/pydantic/issues/1001). This would avoid confusing gotchas when field -names clash with methods on a model, it would also make it safer to add more methods to a model without risking -new clashes. - -After much deliberation (and even giving a lightning talk at the python language submit about alternatives, see -[this discussion](https://discuss.python.org/t/better-fields-access-and-allowing-a-new-character-at-the-start-of-identifiers/14529)). 
-I've decided to go with the simplest and clearest approach, at the expense of a bit more typing: - -All methods on models will start with `model_`, fields' names will not be allowed to start with `"model"` -(aliases can be used if required). - -This will mean `BaseModel` will have roughly the following signature. - -```{.py .annotate title="New BaseModel methods" test="skip" lint="skip" upgrade="skip"} -class BaseModel: - model_fields: List[FieldInfo] - """previously `__fields__`, although the format will change a lot""" - @classmethod - def model_validate(cls, data: Any, *, context=None) -> Self: # (1) - """ - previously `parse_obj()`, validate data - """ - @classmethod - def model_validate_json( - cls, - data: str | bytes | bytearray, - *, - context=None - ) -> Self: - """ - previously `parse_raw(..., content_type='application/json')` - validate data from JSON - """ - @classmethod - def model_is_instance(cls, data: Any, *, context=None) -> bool: # (2) - """ - new, check if data is value for the model - """ - @classmethod - def model_is_instance_json( - cls, - data: str | bytes | bytearray, - *, - context=None - ) -> bool: - """ - Same as `model_is_instance`, but from JSON - """ - def model_dump( - self, - include: ... = None, - exclude: ... = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - mode: Literal['unchanged', 'dicts', 'json-compliant'] = 'unchanged', - converter: Callable[[Any], Any] | None = None - ) -> Any: - """ - previously `dict()`, as before - with new `mode` argument - """ - def model_dump_json(self, ...) -> str: - """ - previously `json()`, arguments as above - effectively equivalent to `json.dump(self.model_dump(..., mode='json'))`, - but more performant - """ - def model_json_schema(self, ...) -> dict[str, Any]: - """ - previously `schema()`, arguments roughly as before - JSON schema as a dict - """ - def model_update_forward_refs(self) -> None: - """ - previously `update_forward_refs()`, update forward references - """ - @classmethod - def model_construct( - self, - _fields_set: set[str] | None = None, - **values: Any - ) -> Self: - """ - previously `construct()`, arguments roughly as before - construct a model with no validation - """ - @classmethod - def model_customize_schema(cls, schema: dict[str, Any]) -> dict[str, Any]: - """ - new, way to customize validation, - e.g. if you wanted to alter how the model validates certain types, - or add validation for a specific type without custom types or - decorated validators - """ - class ModelConfig: - """ - previously `Config`, configuration class for models - """ -``` - -1. see [Validation Context](#validation-context) for more information on `context` -2. 
see [`is_instance` checks](#is_instance-like-checks) - -The following methods will be removed: - -* `.parse_file()` - was a mistake, should never have been in pydantic -* `.parse_raw()` - partially replaced by `.model_validate_json()`, the other functionality was a mistake -* `.from_orm()` - the functionality has been moved to config, see [other improvements](#other-improvements) below -* `.schema_json()` - mostly since it causes confusion between pydantic validation schema and JSON schema, - and can be replaced with just `json.dumps(m.model_json_schema())` -* `.copy()` instead we'll implement `__copy__` and let people use the `copy` module - (this removes some functionality) from `copy()` but there are bugs and ambiguities with the functionality anyway - -### Strict API & API documentation :thumbsup: - -When preparing for pydantic V2, we'll make a strict distinction between the public API and private functions & classes. -Private objects will be clearly identified as private via a `_internal` sub package to discourage use. - -The public API will have API documentation. I've recently been working with the wonderful -[mkdocstrings](https://github.com/mkdocstrings/mkdocstrings) package for both -[dirty-equals](https://dirty-equals.helpmanual.io/) and -[watchfiles](https://watchfiles.helpmanual.io/) documentation. I intend to use `mkdocstrings` to generate complete -API documentation for V2. - -This wouldn't replace the current example-based somewhat informal documentation style but instead will augment it. - -### Error descriptions :thumbsup: - -The way line errors (the individual errors within a `ValidationError`) are built has become much more sophisticated -in pydantic-core. - -There's a well-defined -[set of error codes and messages](https://github.com/pydantic/pydantic-core/blob/main/src/errors/kinds.rs). - -More will be added when other types are validated via pure python validators in pydantic. - -I would like to add a dedicated section to the documentation with extra information for each type of error. - -This would be another key in a line error: `documentation`, which would link to the appropriate section in the -docs. - -Thus, errors might look like: - -```py title="Line Errors Example" test="skip" lint="skip" upgrade="skip" -[ - { - 'kind': 'greater_than_equal', - 'loc': ['age'], - 'message': 'Value must be greater than or equal to 18', - 'input_value': 11, - 'context': {'ge': 18}, - 'documentation': 'https://pydantic.dev/errors/#greater_than_equal', - }, - { - 'kind': 'bool_parsing', - 'loc': ['is_developer'], - 'message': 'Value must be a valid boolean, unable to interpret input', - 'input_value': 'foobar', - 'documentation': 'https://pydantic.dev/errors/#bool_parsing', - }, -] -``` - -I own the `pydantic.dev` domain and will use it for at least these errors so that even if the docs URL -changes, the error will still link to the correct documentation. If developers don't want to show these errors to users, -they can always process the errors list and filter out items from each error they don't need or want. - -### No pure python implementation :frowning: - -Since pydantic-core is written in Rust, and I have absolutely no intention of rewriting it in python, -pydantic V2 will only work where a binary package can be installed. 
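-
-Where a binary does install, the compiled core can already be exercised directly. Here is a rough, hedged sketch against
-the provisional pydantic-core API (the exact schema format may still change before release):
-
-```py title="Using pydantic-core directly" test="skip" lint="skip" upgrade="skip"
-from pydantic_core import SchemaValidator
-
-# a minimal core schema: a single int validator, no model involved
-v = SchemaValidator({'type': 'int'})
-assert v.validate_python('123') == 123  # lax mode coerces the numeric string
-```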
- -pydantic-core will provide binaries in PyPI for (at least): - -* **Linux**: `x86_64`, `aarch64`, `i686`, `armv7l`, `musl-x86_64` & `musl-aarch64` -* **MacOS**: `x86_64` & `arm64` (except python 3.7) -* **Windows**: `amd64` & `win32` -* **Web Assembly**: `wasm32` - (pydantic-core is [already](https://github.com/pydantic/pydantic-core/runs/7214195252?check_suite_focus=true) - compiled for wasm32 using emscripten and unit tests pass, except where cpython itself has - [problems](https://github.com/pyodide/pyodide/issues/2841)) - -Binaries for pypy are a work in progress and will be added if possible, -see [pydantic-core#154](https://github.com/pydantic/pydantic-core/issues/154). - -Other binaries can be added provided they can be (cross-)compiled on github actions. -If no binary is available from PyPI, pydantic-core can be compiled from source if Rust stable is available. - -The only place where I know this will cause problems is Raspberry Pi, which is a -[mess](https://github.com/piwheels/packages/issues/254) when it comes to packages written in Rust for Python. -Effectively, until that's fixed you'll likely have to install pydantic with -`pip install -i https://pypi.org/simple/ pydantic`. - -### Pydantic becomes a pure python package :thumbsup: - -Pydantic V1.X is a pure python code base but is compiled with cython to provide some performance improvements. -Since the "hot" code is moved to pydantic-core, pydantic itself can go back to being a pure python package. - -This should significantly reduce the size of the pydantic package and make unit tests of pydantic much faster. -In addition: - -* some constraints on pydantic code can be removed once it no-longer has to be compilable with cython -* debugging will be easier as you'll be able to drop straight into the pydantic codebase as you can with other, - pure python packages - -Some pieces of edge logic could get a little slower as they're no longer compiled. - -### `is_instance` like checks :thumbsup: - -Strict mode also means it makes sense to provide an `is_instance` method on models which effectively run -validation then throws away the result while avoiding the (admittedly small) overhead of creating and raising -an error or returning the validation result. - -To be clear, this isn't a real `isinstance` call, rather it is equivalent to - -```py title="is_instance" test="skip" lint="skip" upgrade="skip" -class BaseModel: - ... - - @classmethod - def model_is_instance(cls, data: Any) -> bool: - try: - cls(**data) - except ValidationError: - return False - else: - return True -``` - -### I'm dropping the word "parse" and just using "validate" :neutral_face: - -Partly due to the issues with the lack of strict mode, -I've gone back and forth between using the terms "parse" and "validate" for what pydantic does. - -While pydantic is not simply a validation library (and I'm sure some would argue validation is not strictly what it does), -most people use the word **"validation"**. - -It's time to stop fighting that, and use consistent names. - -The word "parse" will no longer be used except when talking about JSON parsing, see -[model methods](#model-namespace-cleanup) above. - -### Changes to custom field types :neutral_face: - -Since the core structure of validators has changed from "a list of validators to call one after another" to -"a tree of validators which call each other", the -[`__get_validators__`](https://docs.pydantic.dev/usage/types/#classes-with-__get_validators__) -way of defining custom field types no longer makes sense. 
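-
-For contrast, here is a hedged sketch of the V1 pattern being replaced, where a custom type advertises its validators
-via `__get_validators__`:
-
-```py title="Old custom field types (V1)" test="skip" lint="skip" upgrade="skip"
-class Foobar:
-    def __init__(self, value: str):
-        self.value = value
-
-    @classmethod
-    def __get_validators__(cls):
-        # V1: yield one or more validator callables, run in sequence
-        yield cls.validate
-
-    @classmethod
-    def validate(cls, value):
-        if 'foobar' in value:
-            return cls(value)
-        raise ValueError('expected foobar')
-```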
- -Instead, we'll look for the attribute `__pydantic_validation_schema__` which must be a -pydantic-core compliant schema for validating data to this field type (the `function` -item can be a string, if so a function of that name will be taken from the class, see `'validate'` below). - -Here's an example of how a custom field type could be defined: - -```py title="New custom field types" test="skip" lint="skip" upgrade="skip" -from pydantic import ValidationSchema - - -class Foobar: - def __init__(self, value: str): - self.value = value - - __pydantic_validation_schema__: ValidationSchema = { - 'type': 'function', - 'mode': 'after', - 'function': 'validate', - 'schema': {'type': 'str'}, - } - - @classmethod - def validate(cls, value): - if 'foobar' in value: - return Foobar(value) - else: - raise ValueError('expected foobar') -``` - -What's going on here: `__pydantic_validation_schema__` defines a schema which effectively says: - -> Validate input data as a string, then call the `validate` function with that string, use the returned value -> as the final result of validation. - -`ValidationSchema` is just an alias to -[`pydantic_core.Schema`](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/_types.py#L291) -which is a type defining the schema for validation schemas. - -!!! note - pydantic-core schema has full type definitions although since the type is recursive, - mypy can't provide static type analysis, pyright however can. - -We can probably provide one or more helper functions to make `__pydantic_validation_schema__` easier to generate. - -## Other Improvements :thumbsup: - -Some other things which will also change, IMHO for the better: - -1. Recursive models with cyclic references - although recursive models were supported by pydantic V1, - data with cyclic references caused recursion errors, in pydantic-core cyclic references are correctly detected - and a validation error is raised -2. The reason I've been so keen to get pydantic-core to compile and run with wasm is that I want all examples - in the docs of pydantic V2 to be editable and runnable in the browser -3. Full support for `TypedDict`, including `total=False` - e.g. omitted keys, - providing validation schema to a `TypedDict` field/item will use `Annotated`, e.g. `Annotated[str, Field(strict=True)]` -4. `from_orm` has become `from_attributes` and is now defined at schema generation time - (either via model config or field config) -5. `input_value` has been added to each line error in a `ValidationError`, making errors easier to understand, - and more comprehensive details of errors to be provided to end users, - [pydantic#784](https://github.com/pydantic/pydantic/issues/784) -6. `on_error` logic in a schema which allows either a default value to be used in the event of an error, - or that value to be omitted (in the case of a `total=False` `TypedDict`), - [pydantic-core#151](https://github.com/pydantic/pydantic-core/issues/151) -7. `datetime`, `date`, `time` & `timedelta` validation is improved, see the - [speedate] Rust library I built specifically for this purpose for more details -8. Powerful "priority" system for optionally merging or overriding config in sub-models for nested schemas -9. Pydantic will support [annotated-types](https://github.com/annotated-types/annotated-types), - so you can do stuff like `Annotated[set[int], Len(0, 10)]` or `Name = Annotated[str, Len(1, 1024)]` -10. 
A single decorator for general usage - we should add a `validate` decorator which can be used: - * on functions (replacing `validate_arguments`) - * on dataclasses, `pydantic.dataclasses.dataclass` will become an alias of this - * on `TypedDict`s - * On any supported type, e.g. `Union[...]`, `Dict[str, Thing]` - * On Custom field types - e.g. anything with a `__pydantic_schema__` attribute -11. Easier validation error creation, I've often found myself wanting to raise `ValidationError`s outside - models, particularly in FastAPI - ([here](https://github.com/samuelcolvin/foxglove/blob/a4aaacf372178f345e5ff1d569ee8fd9d10746a4/foxglove/exceptions.py#L137-L149) - is one method I've used), we should provide utilities to generate these errors -12. Improve the performance of `__eq__` on models -13. Computed fields, these having been an idea for a long time in pydantic - we should get them right -14. Model validation that avoids instances of subclasses leaking data (particularly important for FastAPI), - see [pydantic-core#155](https://github.com/pydantic/pydantic-core/issues/155) -15. We'll now follow [semvar](https://semver.org/) properly and avoid breaking changes between minor versions, - as a result, major versions will become more common -16. Improve generics to use `M(Basemodel, Generic[T])` instead of `M(GenericModel, Generic[T])` - e.g. `GenericModel` - can be removed; this results from no-longer needing to compile pydantic code with cython - -## Removed Features & Limitations :frowning: - -The emoji here is just for variation, I'm not frowning about any of this, these changes are either good IMHO -(will make pydantic cleaner, easier to learn and easier to maintain) or irrelevant to 99.9+% of users. - -1. `__root__` custom root models are no longer necessary since validation on any supported data type is allowed - without a model -2. `.parse_file()` and `.parse_raw()`, partially replaced with `.model_validate_json()`, - see [model methods](#model-namespace-cleanup) -3. `.schema_json()` & `.copy()`, see [model methods](#model-namespace-cleanup) -4. `TypeError` are no longer considered as validation errors, but rather as internal errors, this is to better - catch errors in argument names in function validators. -5. Subclasses of builtin types like `str`, `bytes` and `int` are coerced to their parent builtin type, - this is a limitation of how pydantic-core converts these types to Rust types during validation, if you have a - specific need to keep the type, you can use wrap validators or custom type validation as described above -6. integers are represented in rust code as `i64`, meaning if you want to use ints where `abs(v) > 2^63 − 1` - (9,223,372,036,854,775,807), you'll need to use a [wrap validator](#validator-function-improvements) and your own logic -7. [Settings Management](https://docs.pydantic.dev/usage/settings/) ??? - I definitely don't want to - remove the functionality, but it's something of a historical curiosity that it lives within pydantic, - perhaps it should move to a separate package, perhaps installable alongside pydantic with - `pip install pydantic[settings]`? -8. 
The following `Config` properties will be removed or deprecated: - * `fields` - it's very old (it pre-dates `Field`), can be removed - * `allow_mutation` will be removed, instead `frozen` will be used - * `error_msg_templates`, it's not properly documented anyway, error messages can be customized with external logic if required - * `getter_dict` - pydantic-core has hardcoded `from_attributes` logic - * `json_loads` - again this is hard coded in pydantic-core - * `json_dumps` - possibly - * `json_encoders` - see the export "mode" discussion [above](#improvements-to-dumpingserializationexport) - * `underscore_attrs_are_private` we should just choose a sensible default - * `smart_union` - all unions are now "smart" -9. `dict(model)` functionality should be removed, there's a much clearer distinction now that in 2017 when I - implemented this between a model and a dict - -## Features Remaining :neutral_face: - -The following features will remain (mostly) unchanged: - -* JSONSchema, internally this will need to change a lot, but hopefully the external interface will remain unchanged -* `dataclass` support, again internals might change, but not the external interface -* `validate_arguments`, might be renamed, but otherwise remain -* hypothesis plugin, might be able to improve this as part of the general cleanup - -## Questions :question: - -I hope the explanation above is useful. I'm sure people will have questions and feedback; I'm aware -I've skipped over some features with limited detail (this post is already fairly long :sleeping:). - -To allow feedback without being overwhelmed, I've created a "Pydantic V2" category for -[discussions on github](https://github.com/pydantic/pydantic/discussions/categories/pydantic-v2) - please -feel free to create a discussion if you have any questions or suggestions. -We will endeavour to read and respond to everyone. - ---- - -## Implementation Details :nerd: - -(This is yet to be built, so these are nascent ideas which might change) - -At the center of pydantic v2 will be a `PydanticValidator` class which looks roughly like this -(note: this is just pseudo-code, it's not even valid python and is only supposed to be used to demonstrate the idea): - -```py title="PydanticValidator" test="skip" lint="skip" upgrade="skip" -# type identifying data which has been validated, -# as per pydantic-core, this can include "fields_set" data -ValidData = ... - -# any type we can perform validation for -AnyOutputType = ... - -class PydanticValidator: - def __init__(self, output_type: AnyOutputType, config: Config): - ... - def validate(self, input_data: Any) -> ValidData: - ... - def validate_json(self, input_data: str | bytes | bytearray) -> ValidData: - ... - def is_instance(self, input_data: Any) -> bool: - ... - def is_instance_json(self, input_data: str | bytes | bytearray) -> bool: - ... - def json_schema(self) -> dict: - ... - def dump( - self, - data: ValidData, - include: ... = None, - exclude: ... = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - mode: Literal['unchanged', 'dicts', 'json-compliant'] = 'unchanged', - converter: Callable[[Any], Any] | None = None - ) -> Any: - ... - def dump_json(self, ...) -> str: - ... 
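-
-# hypothetical usage, to show the intent (names and arguments are illustrative, not a final API):
-#   v = PydanticValidator(list[int], Config())
-#   v.validate(['1', '2', '3'])   # validate a bare list[int], no model involved
-#   v.json_schema()               # JSON schema for list[int]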
-```
-
-This could be used directly, but more commonly will be used by the following:
-
-* `BaseModel`
-* the `validate` decorator described above
-* `pydantic.dataclasses.dataclass` (which might be an alias of `validate`)
-* generics
-
-The aim will be to get pydantic V2 to a place where the vast majority of tests continue to pass unchanged,
-thereby guaranteeing (as much as possible) that the external interface to pydantic and its behaviour are unchanged.
-
-## Conversion Table :material-table:
-
-The table below provisionally defines which input value types are allowed for which field types.
-
-**An updated and complete version of this table is available in [V2 conversion table](../concepts/conversion_table.md)**.
-
-!!!note
-    Some of the type conversions shown here are a significant departure from existing behavior; we may have to provide a config
-    flag for backwards compatibility for a few of them. However, pydantic V2 cannot be entirely backward compatible,
-    see [pydantic-core#152](https://github.com/pydantic/pydantic-core/issues/152).
-
-| Field Type    | Input       | Mode   | Input Source | Conditions |
-|---------------|-------------|--------|--------------|------------|
-| `str`         | `str`       | both   | python, JSON | - |
-| `str`         | `bytes`     | lax    | python       | assumes UTF-8, error on unicode decoding error |
-| `str`         | `bytearray` | lax    | python       | assumes UTF-8, error on unicode decoding error |
-| `bytes`       | `bytes`     | both   | python       | - |
-| `bytes`       | `str`       | both   | JSON         | - |
-| `bytes`       | `str`       | lax    | python       | - |
-| `bytes`       | `bytearray` | lax    | python       | - |
-| `int`         | `int`       | strict | python, JSON | max abs value 2^64 - `i64` is used internally, `bool` explicitly forbidden |
-| `int`         | `int`       | lax    | python, JSON | `i64` |
-| `int`         | `float`     | lax    | python, JSON | `i64`, must be exact int, e.g. `f % 1 == 0`, `nan`, `inf` raise errors |
-| `int`         | `Decimal`   | lax    | python, JSON | `i64`, must be exact int, e.g. `f % 1 == 0` |
-| `int`         | `bool`      | lax    | python, JSON | - |
-| `int`         | `str`       | lax    | python, JSON | `i64`, must be numeric only, e.g. `[0-9]+` |
-| `float`       | `float`     | strict | python, JSON | `bool` explicitly forbidden |
-| `float`       | `float`     | lax    | python, JSON | - |
-| `float`       | `int`       | lax    | python, JSON | - |
-| `float`       | `str`       | lax    | python, JSON | must match `[0-9]+(\.[0-9]+)?` |
-| `float`       | `Decimal`   | lax    | python       | - |
-| `float`       | `bool`      | lax    | python, JSON | - |
-| `bool`        | `bool`      | both   | python, JSON | - |
-| `bool`        | `int`       | lax    | python, JSON | allowed: `0, 1` |
-| `bool`        | `float`     | lax    | python, JSON | allowed: `0, 1` |
-| `bool`        | `Decimal`   | lax    | python, JSON | allowed: `0, 1` |
-| `bool`        | `str`       | lax    | python, JSON | allowed: `'f', 'n', 'no', 'off', 'false', 't', 'y', 'on', 'yes', 'true'` |
-| `None`        | `None`      | both   | python, JSON | - |
-| `date`        | `date`      | both   | python       | - |
-| `date`        | `datetime`  | lax    | python       | must be an exact date, e.g. no H, M, S, f |
-| `date`        | `str`       | both   | JSON         | format `YYYY-MM-DD` |
-| `date`        | `str`       | lax    | python       | format `YYYY-MM-DD` |
-| `date`        | `bytes`     | lax    | python       | format `YYYY-MM-DD` (UTF-8) |
-| `date`        | `int`       | lax    | python, JSON | interpreted as seconds or ms from epoch, see [speedate], must be exact date |
-| `date`        | `float`     | lax    | python, JSON | interpreted as seconds or ms from epoch, see [speedate], must be exact date |
-| `datetime`    | `datetime`  | both   | python       | - |
-| `datetime`    | `date`      | lax    | python       | - |
-| `datetime`    | `str`       | both   | JSON         | format `YYYY-MM-DDTHH:MM:SS.f` etc., see [speedate] |
-| `datetime`    | `str`       | lax    | python       | format `YYYY-MM-DDTHH:MM:SS.f` etc., see [speedate] |
-| `datetime`    | `bytes`     | lax    | python       | format `YYYY-MM-DDTHH:MM:SS.f` etc., see [speedate] (UTF-8) |
-| `datetime`    | `int`       | lax    | python, JSON | interpreted as seconds or ms from epoch, see [speedate] |
-| `datetime`    | `float`     | lax    | python, JSON | interpreted as seconds or ms from epoch, see [speedate] |
-| `time`        | `time`      | both   | python       | - |
-| `time`        | `str`       | both   | JSON         | format `HH:MM:SS.FFFFFF` etc., see [speedate] |
-| `time`        | `str`       | lax    | python       | format `HH:MM:SS.FFFFFF` etc., see [speedate] |
-| `time`        | `bytes`     | lax    | python       | format `HH:MM:SS.FFFFFF` etc., see [speedate] (UTF-8) |
-| `time`        | `int`       | lax    | python, JSON | interpreted as seconds, range 0 - 86399 |
-| `time`        | `float`     | lax    | python, JSON | interpreted as seconds, range 0 - 86399.9* |
-| `time`        | `Decimal`   | lax    | python, JSON | interpreted as seconds, range 0 - 86399.9* |
-| `timedelta`   | `timedelta` | both   | python       | - |
-| `timedelta`   | `str`       | both   | JSON         | ISO 8601 format etc., see [speedate] |
-| `timedelta`   | `str`       | lax    | python       | ISO 8601 format etc., see [speedate] |
-| `timedelta`   | `bytes`     | lax    | python       | ISO 8601 format etc., see [speedate] (UTF-8) |
-| `timedelta`   | `int`       | lax    | python, JSON | interpreted as seconds |
-| `timedelta`   | `float`     | lax    | python, JSON | interpreted as seconds |
-| `timedelta`   | `Decimal`   | lax    | python, JSON | interpreted as seconds |
-| `dict`        | `dict`      | both   | python       | - |
-| `dict`        | `Object`    | both   | JSON         | - |
-| `dict`        | `mapping`   | lax    | python       | must implement the mapping interface and have an `items()` method |
-| `TypedDict`   | `dict`      | both   | python       | - |
-| `TypedDict`   | `Object`    | both   | JSON         | - |
-| `TypedDict`   | `Any`       | both   | python       | builtins not allowed, uses `getattr`, requires `from_attributes=True` |
-| `TypedDict`   | `mapping`   | lax    | python       | must implement the mapping interface and have an `items()` method |
-| `list`        | `list`      | both   | python       | - |
-| `list`        | `Array`     | both   | JSON         | - |
-| `list`        | `tuple`     | lax    | python       | - |
-| `list`        | `set`       | lax    | python       | - |
-| `list`        | `frozenset` | lax    | python       | - |
-| `list`        | `dict_keys` | lax    | python       | - |
-| `tuple`       | `tuple`     | both   | python       | - |
-| `tuple`       | `Array`     | both   | JSON         | - |
-| `tuple`       | `list`      | lax    | python       | - |
-| `tuple`       | `set`       | lax    | python       | - |
-| `tuple`       | `frozenset` | lax    | python       | - |
-| `tuple`       | `dict_keys` | lax    | python       | - |
-| `set`         | `set`       | both   | python       | - |
-| `set`         | `Array`     | both   | JSON         | - |
-| `set`         | `list`      | lax    | python       | - |
-| `set`         | `tuple`     | lax    | python       | - |
-| `set`         | `frozenset` | lax    | python       | - |
-| `set`         | `dict_keys` | lax    | python       | - |
-| `frozenset`   | `frozenset` | both   | python       | - |
-| `frozenset`   | `Array`     | both   | JSON         | - |
-| `frozenset`   | `list`      | lax    | python       | - |
-| `frozenset`   | `tuple`     | lax    | python       | - |
-| `frozenset`   | `set`       | lax    | python       | - |
-| `frozenset`   | `dict_keys` | lax    | python       | - |
-| `is_instance` | `Any`       | both   | python       | `isinstance()` check returns `True` |
-| `is_instance` | -           | both   | JSON         | never valid |
-| `callable`    | `Any`       | both   | python       | `callable()` check returns `True` |
-| `callable`    | -           | both   | JSON         | never valid |
-
-The `ModelClass` validator (used to create instances of a class) uses the `TypedDict` validator, then creates an instance
-with `__dict__` and `__fields_set__` set, so the same rules apply as for `TypedDict`.
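For a concrete feel of how the lax and strict rules in this table behave, here is a minimal sketch against released Pydantic V2, assuming `pydantic>=2` is installed (`TypeAdapter` is the shipped counterpart of the `PydanticValidator` sketched above):

```py
from pydantic import TypeAdapter, ValidationError

int_adapter = TypeAdapter(int)

# lax (default) mode follows the table: numeric strings and exact floats coerce to int
assert int_adapter.validate_python('123') == 123
assert int_adapter.validate_python(2.0) == 2

# strict mode only accepts an actual int
try:
    int_adapter.validate_python('123', strict=True)
except ValidationError as exc:
    print(exc.errors()[0]['type'])  # e.g. 'int_type'

# bool coercion accepts the listed string values in lax mode
assert TypeAdapter(bool).validate_python('yes') is True
```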
- -[speedate]: https://docs.rs/speedate/latest/speedate/ diff --git a/docs/concepts/dataclasses.md b/docs/concepts/dataclasses.md index 712407a479..f59d07525a 100644 --- a/docs/concepts/dataclasses.md +++ b/docs/concepts/dataclasses.md @@ -250,7 +250,7 @@ import dataclasses from datetime import datetime from typing import Optional -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel, ConfigDict, ValidationError @dataclasses.dataclass(frozen=True) @@ -265,6 +265,9 @@ class File: class Foo(BaseModel): + # Required so that pydantic revalidates the model attributes + model_config = ConfigDict(revalidate_instances='always') + file: File user: Optional[User] = None diff --git a/docs/contributing.md b/docs/contributing.md index a3ec77ae9d..e5219f7e96 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -6,6 +6,10 @@ vulnerability, please see our [security policy](https://github.com/pydantic/pyda To make it as simple as possible for us to help you, please include the output of the following call in your issue: +```bash +python -c "import pydantic.version; print(pydantic.version.version_info())" +``` +If you're using Pydantic prior to **v2.0** please use: ```bash python -c "import pydantic.utils; print(pydantic.utils.version_info())" ``` @@ -18,7 +22,7 @@ to your question or feature request. ## Pull Requests It should be extremely simple to get started and create a Pull Request. -Pydantic is released regularly so you should see your improvements release in a matter of days or weeks. +Pydantic is released regularly so you should see your improvements release in a matter of days or weeks 🚀. Unless your change is trivial (typo, docs tweak etc.), please create an issue to discuss the change before creating a pull request. diff --git a/docs/plugins/people.yml b/docs/plugins/people.yml index 6bedfab3aa..a1f9e3e58d 100644 --- a/docs/plugins/people.yml +++ b/docs/plugins/people.yml @@ -1,22 +1,32 @@ maintainers: -- login: Kludex - answers: 21 - prs: 111 - avatarUrl: https://avatars.githubusercontent.com/u/7353520?u=62adc405ef418f4b6c8caa93d3eb8ab107bc4927&v=4 - url: https://github.com/Kludex -- login: davidhewitt - answers: 1 - prs: 34 - avatarUrl: https://avatars.githubusercontent.com/u/1939362?u=b4b48981c3a097daaad16c4c5417aa7a3e5e32d9&v=4 - url: https://github.com/davidhewitt - login: samuelcolvin - answers: 278 + answers: 279 prs: 396 avatarUrl: https://avatars.githubusercontent.com/u/4039449?u=42eb3b833047c8c4b4f647a031eaef148c16d93f&v=4 url: https://github.com/samuelcolvin +- login: dmontagu + answers: 55 + prs: 294 + avatarUrl: https://avatars.githubusercontent.com/u/35119617?u=540f30c937a6450812628b9592a1dfe91bbe148e&v=4 + url: https://github.com/dmontagu +- login: hramezani + answers: 16 + prs: 189 + avatarUrl: https://avatars.githubusercontent.com/u/3122442?u=f387fc2dbc0c681f23e80e2ad705790fafcec9a2&v=4 + url: https://github.com/hramezani +- login: davidhewitt + answers: 1 + prs: 35 + avatarUrl: https://avatars.githubusercontent.com/u/1939362?u=b4b48981c3a097daaad16c4c5417aa7a3e5e32d9&v=4 + url: https://github.com/davidhewitt +- login: Kludex + answers: 22 + prs: 111 + avatarUrl: https://avatars.githubusercontent.com/u/7353520?u=62adc405ef418f4b6c8caa93d3eb8ab107bc4927&v=4 + url: https://github.com/Kludex - login: sydney-runkle answers: 27 - prs: 74 + prs: 76 avatarUrl: https://avatars.githubusercontent.com/u/54324534?u=3a4ffd00a8270b607922250d3a2d9c9af38b9cf9&v=4 url: https://github.com/sydney-runkle - login: adriangb @@ -24,32 +34,17 @@ maintainers: prs: 
194 avatarUrl: https://avatars.githubusercontent.com/u/1755071?u=612704256e38d6ac9cbed24f10e4b6ac2da74ecb&v=4 url: https://github.com/adriangb -- login: dmontagu - answers: 55 - prs: 290 - avatarUrl: https://avatars.githubusercontent.com/u/35119617?u=540f30c937a6450812628b9592a1dfe91bbe148e&v=4 - url: https://github.com/dmontagu -- login: hramezani - answers: 15 - prs: 182 - avatarUrl: https://avatars.githubusercontent.com/u/3122442?u=f387fc2dbc0c681f23e80e2ad705790fafcec9a2&v=4 - url: https://github.com/hramezani -- login: lig - answers: 3 - prs: 49 - avatarUrl: https://avatars.githubusercontent.com/u/38705?v=4 - url: https://github.com/lig experts: - login: PrettyWood count: 142 avatarUrl: https://avatars.githubusercontent.com/u/18406791?u=20a4953f7d7e9d49d054b81e1582b08e87b2125f&v=4 url: https://github.com/PrettyWood - login: uriyyo - count: 37 + count: 43 avatarUrl: https://avatars.githubusercontent.com/u/32038156?u=56767a3f7b5f9df2a3c9cfbafb57f9ca3376d634&v=4 url: https://github.com/uriyyo - login: Viicos - count: 36 + count: 38 avatarUrl: https://avatars.githubusercontent.com/u/65306057?u=fcd677dc1b9bef12aa103613e5ccb3f8ce305af9&v=4 url: https://github.com/Viicos - login: lesnik512 @@ -65,8 +60,12 @@ experts: avatarUrl: https://avatars.githubusercontent.com/u/4216559?u=360a36fb602cded27273cbfc0afc296eece90662&v=4 url: https://github.com/nymous last_month_active: +- login: uriyyo + count: 6 + avatarUrl: https://avatars.githubusercontent.com/u/32038156?u=56767a3f7b5f9df2a3c9cfbafb57f9ca3376d634&v=4 + url: https://github.com/uriyyo - login: Viicos - count: 5 + count: 4 avatarUrl: https://avatars.githubusercontent.com/u/65306057?u=fcd677dc1b9bef12aa103613e5ccb3f8ce305af9&v=4 url: https://github.com/Viicos top_contributors: @@ -82,6 +81,10 @@ top_contributors: count: 71 avatarUrl: https://avatars.githubusercontent.com/u/370316?u=eb206070cfe47f242d5fcea2e6c7514f4d0f27f5&v=4 url: https://github.com/tpdorsey +- login: lig + count: 49 + avatarUrl: https://avatars.githubusercontent.com/u/38705?v=4 + url: https://github.com/lig - login: pyup-bot count: 46 avatarUrl: https://avatars.githubusercontent.com/u/16239342?u=8454ae029661131445080f023e1efccc29166485&v=4 @@ -191,6 +194,10 @@ top_reviewers: count: 207 avatarUrl: https://avatars.githubusercontent.com/u/18406791?u=20a4953f7d7e9d49d054b81e1582b08e87b2125f&v=4 url: https://github.com/PrettyWood +- login: lig + count: 103 + avatarUrl: https://avatars.githubusercontent.com/u/38705?v=4 + url: https://github.com/lig - login: tpdorsey count: 77 avatarUrl: https://avatars.githubusercontent.com/u/370316?u=eb206070cfe47f242d5fcea2e6c7514f4d0f27f5&v=4 @@ -203,6 +210,10 @@ top_reviewers: count: 27 avatarUrl: https://avatars.githubusercontent.com/u/36469655?u=f67d8fa6d67d35d2f5ebd5b15e24efeb41036fd3&v=4 url: https://github.com/Bobronium +- login: Viicos + count: 18 + avatarUrl: https://avatars.githubusercontent.com/u/65306057?u=fcd677dc1b9bef12aa103613e5ccb3f8ce305af9&v=4 + url: https://github.com/Viicos - login: Gr1N count: 17 avatarUrl: https://avatars.githubusercontent.com/u/1087619?u=cd78c4f602bf9f9667277dd0af9302a7fe9dd75a&v=4 @@ -211,10 +222,6 @@ top_reviewers: count: 17 avatarUrl: https://avatars.githubusercontent.com/u/1148665?u=b69e6fe797302f025a2d125e377e27f8ea0b8058&v=4 url: https://github.com/StephenBrown2 -- login: Viicos - count: 17 - avatarUrl: https://avatars.githubusercontent.com/u/65306057?u=fcd677dc1b9bef12aa103613e5ccb3f8ce305af9&v=4 - url: https://github.com/Viicos - login: alexmojaki count: 14 avatarUrl: 
https://avatars.githubusercontent.com/u/3627481?u=9bb2e0cf1c5ef3d0609d2e639a135b7b4ca8b463&v=4 diff --git a/docs/plugins/using.toml b/docs/plugins/using.toml index c78132743f..5e02347663 100644 --- a/docs/plugins/using.toml +++ b/docs/plugins/using.toml @@ -92,9 +92,6 @@ stars = 7908 repo = "NVlabs/SPADE" stars = 7407 [[libs]] -repo = "NVlabs/SPADE" -stars = 7407 -[[libs]] repo = "brycedrennan/imaginAIry" stars = 7217 [[libs]] diff --git a/mkdocs.yml b/mkdocs.yml index 4f99680e3d..618157d7e1 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -127,10 +127,7 @@ nav: - datamodel-code-generator: integrations/datamodel_code_generator.md - devtools: integrations/devtools.md - Rich: integrations/rich.md -- Blog: - - blog/pydantic-v2-final.md - - blog/pydantic-v2-alpha.md - - blog/pydantic-v2.md +- Blog: https://blog.pydantic.dev/ - Pydantic People: pydantic_people.md markdown_extensions: @@ -246,4 +243,7 @@ plugins: 'usage/types/sequence_iterable.md': 'api/standard_library_types.md#other-iterables' 'usage/types/set_types.md': 'api/standard_library_types.md#sets' 'usage/types/uuids.md': 'api/standard_library_types.md#uuid' + 'blog/pydantic-v2-alpha.md': 'https://blog.pydantic.dev/blog/2023/04/03/pydantic-v2-pre-release/' + 'blog/pydantic-v2-final.md': 'https://blog.pydantic.dev/blog/2023/06/30/pydantic-v2-is-here/' + 'blog/pydantic-v2.md': 'https://blog.pydantic.dev/blog/2022/07/10/pydantic-v2-plan/' - external-markdown: diff --git a/pdm.lock b/pdm.lock index 87a856c22d..6fa8141aa9 100644 --- a/pdm.lock +++ b/pdm.lock @@ -1184,14 +1184,14 @@ files = [ [[package]] name = "pydantic-extra-types" -version = "2.1.0" -requires_python = ">=3.7" +version = "2.4.0" +requires_python = ">=3.8" git = "https://github.com/pydantic/pydantic-extra-types.git" ref = "main" -revision = "843b753e9e8cb74e83cac55598719b39a4d5ef1f" +revision = "634487fa8dfa9e880c786d24e6ad72d7e2a99a07" summary = "Extra Pydantic types." 
dependencies = [ - "pydantic>=2.0.3", + "pydantic>=2.5.2", ] [[package]] diff --git a/pydantic/_internal/_fields.py b/pydantic/_internal/_fields.py index 7a3410e32d..1ce4e058c6 100644 --- a/pydantic/_internal/_fields.py +++ b/pydantic/_internal/_fields.py @@ -264,6 +264,10 @@ def collect_dataclass_fields( dataclass_fields: dict[str, dataclasses.Field] = cls.__dataclass_fields__ cls_localns = dict(vars(cls)) # this matches get_cls_type_hints_lenient, but all tests pass with `= None` instead + source_module = sys.modules.get(cls.__module__) + if source_module is not None: + types_namespace = {**source_module.__dict__, **(types_namespace or {})} + for ann_name, dataclass_field in dataclass_fields.items(): ann_type = _typing_extra.eval_type_lenient(dataclass_field.type, types_namespace, cls_localns) if is_classvar(ann_type): diff --git a/pydantic/_internal/_generate_schema.py b/pydantic/_internal/_generate_schema.py index c3ba519c27..0d8c71b71d 100644 --- a/pydantic/_internal/_generate_schema.py +++ b/pydantic/_internal/_generate_schema.py @@ -991,16 +991,21 @@ def _common_field_schema( # C901 evaluated = _typing_extra.eval_type_lenient(field_info.annotation, types_namespace, None) if evaluated is not field_info.annotation and not has_instance_in_type(evaluated, PydanticRecursiveRef): - field_info.annotation = evaluated + new_field_info = FieldInfo.from_annotation(evaluated) + field_info.annotation = new_field_info.annotation # Handle any field info attributes that may have been obtained from now-resolved annotations - new_field_info = FieldInfo.from_annotation(evaluated) for k, v in new_field_info._attributes_set.items(): # If an attribute is already set, it means it was set by assigning to a call to Field (or just a # default value), and that should take the highest priority. So don't overwrite existing attributes. - if k not in field_info._attributes_set: + # We skip over "attributes" that are present in the metadata_lookup dict because these won't + # actually end up as attributes of the `FieldInfo` instance. + if k not in field_info._attributes_set and k not in field_info.metadata_lookup: setattr(field_info, k, v) + # Finally, ensure the field info also reflects all the `_attributes_set` that are actually metadata. 
+ field_info.metadata = [*new_field_info.metadata, *field_info.metadata] + source_type, annotations = field_info.annotation, field_info.metadata def set_discriminator(schema: CoreSchema) -> CoreSchema: diff --git a/pydantic/_internal/_model_construction.py b/pydantic/_internal/_model_construction.py index e90636f6f3..49da37c449 100644 --- a/pydantic/_internal/_model_construction.py +++ b/pydantic/_internal/_model_construction.py @@ -188,6 +188,11 @@ def wrapped_model_post_init(self: BaseModel, __context: Any) -> None: types_namespace=types_namespace, create_model_module=_create_model_module, ) + + # If this is placed before the complete_model_class call above, + # the generic computed fields return type is set to PydanticUndefined + cls.model_computed_fields = {k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()} + # using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__ # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is # only hit for _proper_ subclasses of BaseModel diff --git a/pydantic/config.py b/pydantic/config.py index 8743cda480..6b22586bd2 100644 --- a/pydantic/config.py +++ b/pydantic/config.py @@ -125,7 +125,7 @@ class User(BaseModel): attributes are hashable. Defaults to `False`. Note: - On V1, this setting was called `allow_mutation`, and was `True` by default. + On V1, the inverse of this setting was called `allow_mutation`, and was `True` by default. """ populate_by_name: bool diff --git a/pydantic/functional_validators.py b/pydantic/functional_validators.py index f5654967e6..0914528469 100644 --- a/pydantic/functional_validators.py +++ b/pydantic/functional_validators.py @@ -216,20 +216,21 @@ def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchema if TYPE_CHECKING: class _OnlyValueValidatorClsMethod(Protocol): - def __call__(self, __cls: Any, __value: Any) -> Any: + def __call__(self, cls: Any, value: Any, /) -> Any: ... class _V2ValidatorClsMethod(Protocol): - def __call__(self, __cls: Any, __input_value: Any, __info: _core_schema.ValidationInfo) -> Any: + def __call__(self, cls: Any, value: Any, info: _core_schema.ValidationInfo, /) -> Any: ... class _V2WrapValidatorClsMethod(Protocol): def __call__( self, - __cls: Any, - __input_value: Any, - __validator: _core_schema.ValidatorFunctionWrapHandler, - __info: _core_schema.ValidationInfo, + cls: Any, + value: Any, + handler: _core_schema.ValidatorFunctionWrapHandler, + info: _core_schema.ValidationInfo, + /, ) -> Any: ... @@ -379,7 +380,10 @@ class ModelWrapValidatorHandler(_core_schema.ValidatorFunctionWrapHandler, Proto """@model_validator decorated function handler argument type. This is used when `mode='wrap'`.""" def __call__( # noqa: D102 - self, input_value: Any, outer_location: str | int | None = None + self, + value: Any, + outer_location: str | int | None = None, + /, ) -> _ModelTypeCo: # pragma: no cover ... @@ -395,8 +399,9 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, - __handler: ModelWrapValidatorHandler[_ModelType], + value: Any, + handler: ModelWrapValidatorHandler[_ModelType], + /, ) -> _ModelType: ... 
@@ -410,9 +415,10 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, - __handler: ModelWrapValidatorHandler[_ModelType], - __info: _core_schema.ValidationInfo, + value: Any, + handler: ModelWrapValidatorHandler[_ModelType], + info: _core_schema.ValidationInfo, + /, ) -> _ModelType: ... @@ -427,7 +433,8 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, + value: Any, + /, ) -> Any: ... @@ -443,7 +450,8 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, + value: Any, + /, ) -> Any: ... @@ -456,8 +464,9 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, - __info: _core_schema.ValidationInfo, + value: Any, + info: _core_schema.ValidationInfo, + /, ) -> Any: ... @@ -471,8 +480,9 @@ def __call__( # noqa: D102 # this can be a dict, a model instance # or anything else that gets passed to validate_python # thus validators _must_ handle all cases - __value: Any, - __info: _core_schema.ValidationInfo, + value: Any, + info: _core_schema.ValidationInfo, + /, ) -> Any: ... diff --git a/pydantic/main.py b/pydantic/main.py index 59afce4e7c..59c62ad9e3 100644 --- a/pydantic/main.py +++ b/pydantic/main.py @@ -122,16 +122,22 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass): __pydantic_serializer__: ClassVar[SchemaSerializer] __pydantic_validator__: ClassVar[SchemaValidator] + model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] + """A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects.""" + # Instance attributes # Note: we use the non-existent kwarg `init=False` in pydantic.fields.Field below so that @dataclass_transform # doesn't think these are valid as keyword arguments to the class initializer. __pydantic_extra__: dict[str, Any] | None = _Field(init=False) # type: ignore __pydantic_fields_set__: set[str] = _Field(init=False) # type: ignore __pydantic_private__: dict[str, Any] | None = _Field(init=False) # type: ignore + else: # `model_fields` and `__pydantic_decorators__` must be set for # pydantic._internal._generate_schema.GenerateSchema.model_schema to work for a plain BaseModel annotation model_fields = {} + model_computed_fields = {} + __pydantic_decorators__ = _decorators.DecoratorInfos() __pydantic_parent_namespace__ = None # Prevent `BaseModel` from being instantiated directly: @@ -167,15 +173,6 @@ def __init__(self, /, **data: Any) -> None: # type: ignore # The following line sets a flag that we use to determine when `__init__` gets overridden by the user __init__.__pydantic_base_init__ = True - @property - def model_computed_fields(self) -> dict[str, ComputedFieldInfo]: - """Get the computed fields of this model instance. - - Returns: - A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects. - """ - return {k: v.info for k, v in self.__pydantic_decorators__.computed_fields.items()} - @property def model_extra(self) -> dict[str, Any] | None: """Get extra fields set during validation. 
@@ -212,17 +209,19 @@ def model_construct(cls: type[Model], _fields_set: set[str] | None = None, **val """ m = cls.__new__(cls) fields_values: dict[str, Any] = {} - defaults: dict[str, Any] = {} # keeping this separate from `fields_values` helps us compute `_fields_set` + fields_set = set() + for name, field in cls.model_fields.items(): if field.alias and field.alias in values: fields_values[name] = values.pop(field.alias) + fields_set.add(name) elif name in values: fields_values[name] = values.pop(name) + fields_set.add(name) elif not field.is_required(): - defaults[name] = field.get_default(call_default_factory=True) + fields_values[name] = field.get_default(call_default_factory=True) if _fields_set is None: - _fields_set = set(fields_values.keys()) - fields_values.update(defaults) + _fields_set = fields_set _extra: dict[str, Any] | None = None if cls.model_config.get('extra') == 'allow': diff --git a/pydantic/type_adapter.py b/pydantic/type_adapter.py index 05e2222e35..feda5d839c 100644 --- a/pydantic/type_adapter.py +++ b/pydantic/type_adapter.py @@ -204,7 +204,7 @@ def __init__( except AttributeError: if module is None: f = sys._getframe(1) - module = cast(str, f.f_globals['__name__']) + module = cast(str, f.f_globals.get('__name__', '')) validator = create_schema_validator( core_schema, type, module, str(type), 'TypeAdapter', core_config, config_wrapper.plugin_settings ) # type: ignore diff --git a/pydantic/types.py b/pydantic/types.py index 76af7956ab..5d6b3a167c 100644 --- a/pydantic/types.py +++ b/pydantic/types.py @@ -1724,6 +1724,19 @@ def validate_brand(card_number: str) -> PaymentCardBrand: 'tib': 2**40, 'pib': 2**50, 'eib': 2**60, + 'bit': 1 / 8, + 'kbit': 10**3 / 8, + 'mbit': 10**6 / 8, + 'gbit': 10**9 / 8, + 'tbit': 10**12 / 8, + 'pbit': 10**15 / 8, + 'ebit': 10**18 / 8, + 'kibit': 2**10 / 8, + 'mibit': 2**20 / 8, + 'gibit': 2**30 / 8, + 'tibit': 2**40 / 8, + 'pibit': 2**50 / 8, + 'eibit': 2**60 / 8, } BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) @@ -1821,11 +1834,13 @@ def human_readable(self, decimal: bool = False) -> str: return f'{num:0.1f}{final_unit}' def to(self, unit: str) -> float: - """Converts a byte size to another unit. + """Converts a byte size to another unit, including both byte and bit units. Args: - unit: The unit to convert to. Must be one of the following: B, KB, MB, GB, TB, PB, EiB, - KiB, MiB, GiB, TiB, PiB, EiB. + unit: The unit to convert to. Must be one of the following: B, KB, MB, GB, TB, PB, EB, + KiB, MiB, GiB, TiB, PiB, EiB (byte units) and + bit, kbit, mbit, gbit, tbit, pbit, ebit, + kibit, mibit, gibit, tibit, pibit, eibit (bit units). Returns: The byte size in the new unit. 
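The expanded `ByteSize.to()` docstring above covers the new bit units; a minimal sketch of the resulting behaviour, assuming a Pydantic build that includes this change (expected values taken from the tests later in this diff):

```py
from pydantic import BaseModel, ByteSize


class Model(BaseModel):
    size: ByteSize


m = Model(size='1 GiB')   # ByteSize stores the parsed value as an integer number of bytes
print(int(m.size))        # 1073741824
print(m.size.to('MiB'))   # 1024.0
print(m.size.to('bit'))   # 8589934592.0 (new bit unit)
print(m.size.to('kbit'))  # 8589934.592 (new decimal bit unit)
```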
diff --git a/pydantic/v1/types.py b/pydantic/v1/types.py index 5881e74599..235bb1809f 100644 --- a/pydantic/v1/types.py +++ b/pydantic/v1/types.py @@ -1082,6 +1082,19 @@ def _get_brand(card_number: str) -> PaymentCardBrand: 'tib': 2**40, 'pib': 2**50, 'eib': 2**60, + 'bit': 1/8, + 'kbit': 10**3/8, + 'mbit': 10**6/8, + 'gbit': 10**9/8, + 'tbit': 10**12/8, + 'pbit': 10**15/8, + 'ebit': 10**18/8, + 'kibit': 2**10/8, + 'mibit': 2**20/8, + 'gibit': 2**30/8, + 'tibit': 2**40/8, + 'pibit': 2**50/8, + 'eibit': 2**60/8, } BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) diff --git a/tests/test_annotated.py b/tests/test_annotated.py index a9abb3dd8b..6264454458 100644 --- a/tests/test_annotated.py +++ b/tests/test_annotated.py @@ -424,3 +424,49 @@ class AnnotatedPrivateFieldModel(BaseModel): with pytest.raises(AttributeError): assert model.bar + + +def test_min_length_field_info_not_lost(): + class AnnotatedFieldModel(BaseModel): + foo: 'Annotated[String, Field(min_length=3)]' = Field(description='hello') + + String = str + + AnnotatedFieldModel.model_rebuild() + + assert AnnotatedFieldModel(foo='000').foo == '000' + + with pytest.raises(ValidationError) as exc_info: + AnnotatedFieldModel(foo='00') + + assert exc_info.value.errors(include_url=False) == [ + { + 'loc': ('foo',), + 'input': '00', + 'ctx': {'min_length': 3}, + 'msg': 'String should have at least 3 characters', + 'type': 'string_too_short', + } + ] + + # Ensure that the inner annotation does not override the outer, even for metadata: + class AnnotatedFieldModel2(BaseModel): + foo: 'Annotated[String, Field(min_length=3)]' = Field(description='hello', min_length=2) + + AnnotatedFieldModel2(foo='00') + + class AnnotatedFieldModel4(BaseModel): + foo: 'Annotated[String, Field(min_length=3)]' = Field(description='hello', min_length=4) + + with pytest.raises(ValidationError) as exc_info: + AnnotatedFieldModel4(foo='00') + + assert exc_info.value.errors(include_url=False) == [ + { + 'loc': ('foo',), + 'input': '00', + 'ctx': {'min_length': 4}, + 'msg': 'String should have at least 4 characters', + 'type': 'string_too_short', + } + ] diff --git a/tests/test_computed_fields.py b/tests/test_computed_fields.py index 9a4377a21c..0bc0d0a7e2 100644 --- a/tests/test_computed_fields.py +++ b/tests/test_computed_fields.py @@ -62,6 +62,13 @@ def double_width(self) -> int: assert rect.model_dump() == {'width': 10, 'length': 5, 'area': 50, 'area2': 50} assert rect.model_dump_json() == '{"width":10,"length":5,"area":50,"area2":50}' + assert set(Rectangle.model_fields) == {'width', 'length'} + assert set(Rectangle.model_computed_fields) == {'area', 'area2'} + + assert Rectangle.model_computed_fields['area'].description == 'An awesome area' + assert Rectangle.model_computed_fields['area2'].title == 'Pikarea' + assert Rectangle.model_computed_fields['area2'].description == 'Another area' + def test_computed_fields_json_schema(): class Rectangle(BaseModel): diff --git a/tests/test_construction.py b/tests/test_construction.py index 95cce6afed..a3d13b817c 100644 --- a/tests/test_construction.py +++ b/tests/test_construction.py @@ -509,3 +509,13 @@ class Model(BaseModel): m = Model.model_construct(x=1, y=2) assert m.__pydantic_extra__ == {'y': 2} + + +def test_retain_order_of_fields(): + class MyModel(BaseModel): + a: str = 'a' + b: str + + m = MyModel.model_construct(b='b') + + assert m.model_dump_json() == '{"a":"a","b":"b"}' diff --git 
a/tests/test_plugins.py b/tests/test_plugins.py index 355a8aa46a..f1a15119b5 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -360,6 +360,28 @@ def new_schema_validator(self, schema, schema_type, schema_type_path, schema_kin TypeAdapter(List[str], module='provided_module_by_type_adapter') +def test_plugin_path_type_adapter_without_name_in_globals() -> None: + class CustomOnValidatePython(ValidatePythonHandlerProtocol): + pass + + class Plugin: + def new_schema_validator(self, schema, schema_type, schema_type_path, schema_kind, config, plugin_settings): + assert str(schema_type) == 'typing.List[str]' + assert schema_type_path == SchemaTypePath('', 'typing.List[str]') + assert schema_kind == 'TypeAdapter' + return CustomOnValidatePython(), None, None + + plugin = Plugin() + with install_plugin(plugin): + code = """ +from typing import List + +import pydantic +pydantic.TypeAdapter(List[str]) +""" + exec(code, {'bar': 'baz'}) + + def test_plugin_path_validate_call() -> None: class CustomOnValidatePython(ValidatePythonHandlerProtocol): pass diff --git a/tests/test_types.py b/tests/test_types.py index 5302b9abb1..e82d4911de 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -4444,6 +4444,8 @@ class FrozenSetModel(BaseModel): ('1.5 M', int(1.5e6), '1.4MiB', '1.5MB'), ('5.1kib', 5222, '5.1KiB', '5.2KB'), ('6.2EiB', 7148113328562451456, '6.2EiB', '7.1EB'), + ('8bit', 1, '1B', '1B'), + ('1kbit', 125, '125B', '125B'), ), ) def test_bytesize_conversions(input_value, output, human_bin, human_dec): @@ -4467,6 +4469,8 @@ class Model(BaseModel): assert m.size.to('MiB') == pytest.approx(1024) assert m.size.to('MB') == pytest.approx(1073.741824) assert m.size.to('TiB') == pytest.approx(0.0009765625) + assert m.size.to('bit') == pytest.approx(8589934592) + assert m.size.to('kbit') == pytest.approx(8589934.592) def test_bytesize_raises(): @@ -4487,6 +4491,9 @@ class Model(BaseModel): with pytest.raises(PydanticCustomError, match='byte unit'): m.size.to('bad_unit') + with pytest.raises(PydanticCustomError, match='byte unit'): + m.size.to('1ZiB') + def test_deque_success(): class Model(BaseModel):