"""DataFrameModel components"""
from typing import (
Any,
Callable,
Dict,
Iterable,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from pandera.api.base.model_components import (
BaseCheckInfo,
BaseFieldInfo,
CheckArg,
to_checklist,
)
from pandera.api.checks import Check
from pandera.api.pyspark.column_schema import ColumnSchema
from pandera.api.pyspark.components import Column
from pandera.api.pyspark.types import PySparkDtypeInputTypes
from pandera.errors import SchemaInitError
AnyCallable = Callable[..., Any]
SchemaComponent = TypeVar("SchemaComponent", bound=ColumnSchema)
CHECK_KEY = "__check_config__"
DATAFRAME_CHECK_KEY = "__dataframe_check_config__"


class FieldInfo(BaseFieldInfo):
    """Captures extra information about a field.

    *new in 0.5.0*
    """

    def _to_schema_component(
        self,
        dtype: PySparkDtypeInputTypes,
        component: Type[SchemaComponent],
        checks: CheckArg = None,
        **kwargs: Any,
    ) -> SchemaComponent:
        if self.dtype_kwargs:
            dtype = dtype(**self.dtype_kwargs)  # type: ignore
        checks = self.checks + to_checklist(checks)
        return component(dtype, checks=checks, **kwargs)  # type: ignore

    def to_column(
        self,
        dtype: PySparkDtypeInputTypes,
        checks: CheckArg = None,
        required: bool = True,
        name: Optional[str] = None,
    ) -> Column:
        """Create a schema_components.Column from a field."""
        return self._to_schema_component(
            dtype,
            Column,
            nullable=self.nullable,
            coerce=self.coerce,
            regex=self.regex,
            required=required,
            name=name,
            checks=checks,
            title=self.title,
            description=self.description,
            metadata=self.metadata,
        )

    @property
    def properties(self) -> Dict[str, Any]:
        """Get column properties."""
        return {
            "dtype": self.dtype_kwargs,
            "checks": self.checks,
            "nullable": self.nullable,
            "coerce": self.coerce,
            "name": self.name,
            "regex": self.regex,
            "title": self.title,
            "description": self.description,
            "metadata": self.metadata,
        }
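

# Illustrative sketch (comments only, not executed; names are hypothetical):
# ``FieldInfo`` objects created by ``Field()`` are turned into schema
# components when the DataFrameModel is materialized, roughly as follows::
#
#     import pyspark.sql.types as T
#
#     field_info = Field(gt=0)  # returns a FieldInfo
#     column = field_info.to_column(T.IntegerType(), name="price")
#     # ``column`` is a pandera.api.pyspark.components.Column carrying the
#     # Check.greater_than(0) check plus nullable/coerce/regex settings.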


def Field(
    *,
    eq: Any = None,
    ne: Any = None,
    gt: Any = None,
    ge: Any = None,
    lt: Any = None,
    le: Any = None,
    in_range: Optional[Dict[str, Any]] = None,
    isin: Optional[Iterable] = None,
    notin: Optional[Iterable] = None,
    str_contains: Optional[str] = None,
    str_endswith: Optional[str] = None,
    str_length: Optional[Dict[str, Any]] = None,
    str_matches: Optional[str] = None,
    str_startswith: Optional[str] = None,
    nullable: bool = False,
    unique: bool = False,
    coerce: bool = False,
    regex: bool = False,
    ignore_na: bool = True,
    raise_warning: bool = False,
    n_failure_cases: Optional[int] = None,
    alias: Any = None,
    check_name: Optional[bool] = None,
    dtype_kwargs: Optional[Dict[str, Any]] = None,
    title: Optional[str] = None,
    description: Optional[str] = None,
    metadata: Optional[dict] = None,
    **kwargs,
) -> Any:
"""Used to provide extra information about a field of a DataFrameModel.
*new in 0.16.0*
Some arguments apply only to numeric dtypes and some apply only to ``str``.
See the :ref:`User Guide <dataframe-models>` for more information.
The keyword-only arguments from ``eq`` to ``str_startswith`` are dispatched
to the built-in :py:class:`~pandera.api.checks.Check` methods.
:param nullable: Whether or not the column/index can contain null values.
:param unique: Whether column values should be unique. Currently Not supported
:param coerce: coerces the data type if ``True``.
:param regex: whether or not the field name or alias is a regex pattern.
:param ignore_na: whether or not to ignore null values in the checks.
:param raise_warning: raise a warning instead of an Exception.
:param n_failure_cases: report the first n unique failure cases. If None,
report all failure cases.
:param alias: The public name of the column/index.
:param check_name: Whether to check the name of the column/index during
validation. `None` is the default behavior, which translates to `True`
for columns and multi-index, and to `False` for a single index.
:param dtype_kwargs: The parameters to be forwarded to the type of the
field.
:param title: A human-readable label for the field.
:param description: An arbitrary textual description of the field.
:param metadata: An optional key-value data.
:param kwargs: Specify custom checks that have been registered with the
:class:`~pandera.extensions.register_check_method` decorator.
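
    :example:

    A minimal illustrative sketch (the model, column names, and check values
    below are hypothetical) of ``Field`` in a pyspark ``DataFrameModel``::

        import pyspark.sql.types as T
        import pandera.pyspark as pa

        class ProductSchema(pa.DataFrameModel):
            id: T.IntegerType() = pa.Field(gt=5)
            product_name: T.StringType() = pa.Field(str_startswith="B")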
"""
    # pylint:disable=C0103,W0613,R0914
    check_kwargs = {
        "ignore_na": ignore_na,
        "raise_warning": raise_warning,
        "n_failure_cases": n_failure_cases,
    }
    args = locals()
    checks = []
    check_dispatch = _check_dispatch()

    # Reject custom check keywords that were never registered.
    for key in kwargs:
        if key not in check_dispatch:
            raise SchemaInitError(
                f"custom check '{key}' is not available. Make sure you use "
                "pandera.extensions.register_check_method decorator to "
                "register your custom check method."
            )

    # Build a Check for every non-None check argument, whether built-in
    # (e.g. ``gt``) or custom (passed via ``**kwargs``).
    for arg_name, check_constructor in check_dispatch.items():
        arg_value = args.get(arg_name, kwargs.get(arg_name))
        if arg_value is None:
            continue
        if isinstance(arg_value, dict):
            # Dict-valued arguments (e.g. ``in_range``) are keyword-expanded.
            check_ = check_constructor(**arg_value, **check_kwargs)
        else:
            check_ = check_constructor(arg_value, **check_kwargs)
        checks.append(check_)

    if unique:
        raise SchemaInitError(
            "unique Field argument not yet implemented for pyspark"
        )

    return FieldInfo(
        checks=checks or None,
        nullable=nullable,
        unique=unique,
        coerce=coerce,
        regex=regex,
        check_name=check_name,
        alias=alias,
        title=title,
        description=description,
        dtype_kwargs=dtype_kwargs,
        metadata=metadata,
    )


def _check_dispatch():
    """Map ``Field`` keyword arguments to ``Check`` constructors."""
    return {
        "eq": Check.equal_to,
        "ne": Check.not_equal_to,
        "gt": Check.greater_than,
        "ge": Check.greater_than_or_equal_to,
        "lt": Check.less_than,
        "le": Check.less_than_or_equal_to,
        "in_range": Check.in_range,
        "isin": Check.isin,
        "notin": Check.notin,
        "str_contains": Check.str_contains,
        "str_endswith": Check.str_endswith,
        "str_matches": Check.str_matches,
        "str_length": Check.str_length,
        "str_startswith": Check.str_startswith,
        **Check.REGISTERED_CUSTOM_CHECKS,
    }
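

# For example (illustrative): ``Field(gt=0, isin=[1, 2, 3])`` dispatches to
# ``Check.greater_than(0, ...)`` and ``Check.isin([1, 2, 3], ...)``, each
# receiving the shared ``ignore_na``/``raise_warning``/``n_failure_cases``
# keyword arguments.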


class CheckInfo(BaseCheckInfo):  # pylint:disable=too-few-public-methods
    """Captures extra information about a Check."""

    ...  # pylint:disable=unnecessary-ellipsis


class FieldCheckInfo(CheckInfo):  # pylint:disable=too-few-public-methods
    """Captures extra information about a Check assigned to a field."""

    def __init__(
        self,
        fields: Set[Union[str, FieldInfo]],
        check_fn: AnyCallable,
        regex: bool = False,
        **check_kwargs: Any,
    ) -> None:
        super().__init__(check_fn, **check_kwargs)
        self.fields = fields
        self.regex = regex


def _to_function_and_classmethod(
    fn: Union[AnyCallable, classmethod],
) -> Tuple[AnyCallable, classmethod]:
    """Return both the plain function and a classmethod wrapper for ``fn``."""
    if isinstance(fn, classmethod):
        fn, method = fn.__func__, cast(classmethod, fn)
    else:
        method = classmethod(fn)
    return fn, method


ClassCheck = Callable[[Union[classmethod, AnyCallable]], classmethod]


def check(*fields, regex: bool = False, **check_kwargs) -> ClassCheck:
    """Decorator to make a DataFrameModel method a column check function.

    *new in 0.16.0*

    This indicates that the decorated method should be used to validate a
    field (column). The method will be converted to a classmethod, therefore
    its signature must start with ``cls`` followed by regular check arguments.
    See the :ref:`User Guide <schema-model-custom-check>` for more.

    :param fields: Names of the fields (columns) the check applies to.
    :param regex: Whether the field names are regex patterns.
    :param check_kwargs: Keyword arguments forwarded to Check.
    """

    def _wrapper(fn: Union[classmethod, AnyCallable]) -> classmethod:
        check_fn, check_method = _to_function_and_classmethod(fn)
        check_kwargs.setdefault("description", fn.__doc__)
        setattr(
            check_method,
            CHECK_KEY,
            FieldCheckInfo(set(fields), check_fn, regex, **check_kwargs),
        )
        return check_method

    return _wrapper
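

# Illustrative sketch (hypothetical model; comments only, not executed)::
#
#     class ProductSchema(DataFrameModel):
#         price: T.IntegerType() = Field()
#
#         @check("price")
#         def price_check(cls, data):
#             """Custom column-level check."""
#             ...
#
# After decoration, ``ProductSchema.price_check`` is a classmethod whose
# ``__check_config__`` attribute holds a ``FieldCheckInfo`` with
# ``fields={"price"}`` and ``description="Custom column-level check."``.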


def dataframe_check(_fn=None, **check_kwargs) -> ClassCheck:
    """Decorator to make a DataFrameModel method a dataframe-wide check.

    *new in 0.16.0*

    Decorate a method on the DataFrameModel indicating that it should be used
    to validate the DataFrame. The method will be converted to a classmethod,
    therefore its signature must start with ``cls`` followed by regular check
    arguments. See the :ref:`User Guide <schema-model-dataframe-check>` for
    more.

    :param _fn: Method to decorate; supplied automatically when the decorator
        is used without parentheses.
    :param check_kwargs: Keyword arguments forwarded to Check.
    """

    def _wrapper(fn: Union[classmethod, AnyCallable]) -> classmethod:
        check_fn, check_method = _to_function_and_classmethod(fn)
        check_kwargs.setdefault("description", fn.__doc__)
        setattr(
            check_method,
            DATAFRAME_CHECK_KEY,
            CheckInfo(check_fn, **check_kwargs),
        )
        return check_method

    if _fn:
        return _wrapper(_fn)  # type: ignore
    return _wrapper
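

# Illustrative sketch (hypothetical model; comments only, not executed)::
#
#     class ProductSchema(DataFrameModel):
#         price: T.IntegerType() = Field()
#
#         @dataframe_check
#         def at_least_one_row(cls, data):
#             """Custom dataframe-wide check."""
#             ...
#
# Because ``_fn`` is filled in when the decorator is applied without
# parentheses, both ``@dataframe_check`` and ``@dataframe_check(...)`` work;
# either way the resulting classmethod's ``__dataframe_check_config__``
# attribute ends up holding a ``CheckInfo`` wrapping the method.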