/
class_schema.py
1467 lines (1226 loc) · 51.5 KB
/
class_schema.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Copyright 2023 The PyGlove Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Schema definition for symbolic classes and lists/dicts."""
import abc
import copy
import inspect
import sys
import types
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Type, Union
from pyglove.core import object_utils
class KeySpec(object_utils.Formattable, object_utils.JSONConvertible):
  """Interface for key specifications.

  A key specification determines what keys are acceptable for a symbolic
  field (see :class:`pyglove.Field`). Usually, symbolic attributes have an 1:1
  relationship with symbolic fields. But in some cases (e.g. a dict with
  dynamic keys), a field can be used to describe a group of symbolic
  attributes::

    # A dictionary that accepts key 'x' with float value
    # or keys started with 'foo' with int values.
    d = pg.Dict(value_spec=pg.Dict([
        ('x', pg.typing.Float(min_value=0.0)),
        (pg.typing.StrKey('foo.*'), pg.typing.Int())
    ]))

  You may have noticed that the code above passes a string 'x' as the key spec
  for a field definition. The string is automatically converted to
  :class:`pyglove.typing.ConstStrKey`.

  PyGlove's builtin key specifications are:

  +---------------------------+----------------------------------------------+
  | ``KeySpec`` type          | Class                                        |
  +===========================+==============================================+
  | Fixed string identifier   | :class:`pyglove.typing.ConstStrKey`          |
  +---------------------------+----------------------------------------------+
  | Dynamic string identifier | :class:`pyglove.typing.StrKey`               |
  +---------------------------+----------------------------------------------+
  | Key of a list             | :class:`pyglove.typing.ListKey`              |
  +---------------------------+----------------------------------------------+
  | Key of a tuple            | :class:`pyglove.typing.TupleKey`             |
  +---------------------------+----------------------------------------------+

  In most scenarios, the user either use a string or a ``StrKey`` as the key
  spec, while other ``KeySpec`` subclasses (e.g. ``ListKey`` and ``TupleKey``)
  are used internally to constrain list size and tuple items.
  """

  @property
  @abc.abstractmethod
  def is_const(self) -> bool:
    """Returns whether current key is const."""

  @abc.abstractmethod
  def match(self, key: Any) -> bool:
    """Returns whether current key specification can match a key."""

  @abc.abstractmethod
  def extend(self, base: 'KeySpec') -> 'KeySpec':
    """Extend base key specification and returns self.

    NOTE(daiyip): When a ``Field`` extends a base Field (from a base schema),
    it calls ``extend`` on both its ``KeySpec`` and ``ValueSpec``.
    ``KeySpec.extend`` is to determine whether the ``Field`` key is allowed to
    be extended, and ``ValueSpec.extend`` is to determine the final
    ``ValueSpec`` after extension.

    Args:
      base: A base ``KeySpec`` object.

    Returns:
      An ``KeySpec`` object derived from this key spec by extending the base.
    """

  @classmethod
  def from_str(cls, key: str) -> 'KeySpec':
    """Gets a concrete ``KeySpec`` from a string key.

    NOTE: the real implementation is patched in at import time by
    `key_specs.py`; this stub should never be invoked directly.
    """
    del key
    assert False, 'Overridden in `key_specs.py`.'
class ForwardRef(object_utils.Formattable):
  """Forward type reference.

  A ``ForwardRef`` refers to a class by name within a module, so a type can
  be referenced before its symbol is defined (resolution is deferred until
  the symbol is looked up on the module).
  """

  def __init__(self, module: types.ModuleType, name: str):
    self._module = module
    self._name = name

  @property
  def module(self) -> types.ModuleType:
    """Returns the module where the name is being referenced."""
    return self._module

  @property
  def name(self) -> str:
    """Returns the name of the type reference."""
    return self._name

  @property
  def qualname(self) -> str:
    """Returns the qualified name of the reference."""
    return f'{self.module.__name__}.{self.name}'

  def as_annotation(self) -> Union[Type[Any], str]:
    """Returns the forward reference as an annotation."""
    return self.cls if self.resolved else self.name

  @property
  def resolved(self) -> bool:
    """Returns True if the symbol for the name is resolved."""
    return hasattr(self.module, self.name)

  @property
  def cls(self) -> Type[Any]:
    """Returns the resolved reference class.

    Raises:
      TypeError: If the name does not exist in the module, or the resolved
        symbol is not a class.
    """
    reference = getattr(self.module, self.name, None)
    if reference is None:
      raise TypeError(
          f'{self.name!r} does not exist in module {self.module.__name__!r}'
      )
    elif not inspect.isclass(reference):
      raise TypeError(
          f'{self.name!r} from module {self.module.__name__!r} is not a class.'
      )
    return reference

  def format(self, *args, markdown: bool = False, **kwargs) -> str:
    """Format this object."""
    details = object_utils.kvlist_str([
        ('module', self.module.__name__, None),
        ('name', self.name, None),
    ])
    return object_utils.maybe_markdown_quote(
        f'{self.__class__.__name__}({details})', markdown
    )

  def __eq__(self, other: Any) -> bool:
    """Operator==."""
    if self is other:
      return True
    elif isinstance(other, ForwardRef):
      return self.module is other.module and self.name == other.name
    elif inspect.isclass(other):
      return self.resolved and self.cls is other
    # Fix: the original fell through here and implicitly returned None for
    # operands that are neither a ForwardRef nor a class. Return False so
    # `__eq__` always yields a bool and `__ne__` stays correct.
    return False

  def __ne__(self, other: Any) -> bool:
    """Operator!=."""
    return not self.__eq__(other)

  def __hash__(self) -> int:
    return hash((self.module, self.name))

  def __deepcopy__(self, memo) -> 'ForwardRef':
    """Override deep copy to avoid copying module."""
    return ForwardRef(self.module, self.name)
class ValueSpec(object_utils.Formattable, object_utils.JSONConvertible):
  """Interface for value specifications.

  A value specification defines what values are acceptable for a symbolic
  field (see :class:`pyglove.Field`). When assignments take place on the
  attributes for the field, the associated ValueSpec object will kick in to
  intercept the process and take care of the following aspects:

  * Type check
  * Noneable check
  * Value validation and transformation
  * Default value assignment

  See :meth:`.apply` for more details.

  Different aspects of assignment interception are handled by the following
  methods:

  +-----------------------+-------------------------------------------------+
  | Aspect name           | Property/Method                                 |
  +=======================+=================================================+
  | Type check            | :attr:`.value_type`                             |
  +-----------------------+-------------------------------------------------+
  | Noneable check        | :attr:`.is_noneable`                            |
  +-----------------------+-------------------------------------------------+
  | Type-specific value   | :meth:`.apply`                                  |
  | validation and        |                                                 |
  | transformation        |                                                 |
  +-----------------------+-------------------------------------------------+
  | User transform        | :attr:`.transform`                              |
  +-----------------------+-------------------------------------------------+
  | Default value lookup  | :attr:`.default`                                |
  +-----------------------+-------------------------------------------------+

  There are many ``ValueSpec`` subclasses, each correspond to a commonly used
  Python type, e.g. `Bool`, `Int`, `Float` and etc. PyGlove's builtin value
  specifications are:

  +---------------------------+----------------------------------------------+
  | ``ValueSpec`` type        | Class                                        |
  +===========================+==============================================+
  | bool                      | :class:`pyglove.typing.Bool`                 |
  +---------------------------+----------------------------------------------+
  | int                       | :class:`pyglove.typing.Int`                  |
  +---------------------------+----------------------------------------------+
  | float                     | :class:`pyglove.typing.Float`                |
  +---------------------------+----------------------------------------------+
  | str                       | :class:`pyglove.typing.Str`                  |
  +---------------------------+----------------------------------------------+
  | enum                      | :class:`pyglove.typing.Enum`                 |
  +---------------------------+----------------------------------------------+
  | list                      | :class:`pyglove.typing.List`                 |
  +---------------------------+----------------------------------------------+
  | tuple                     | :class:`pyglove.typing.Tuple`                |
  +---------------------------+----------------------------------------------+
  | dict                      | :class:`pyglove.typing.Dict`                 |
  +---------------------------+----------------------------------------------+
  | instance of a class       | :class:`pyglove.typing.Object`               |
  +---------------------------+----------------------------------------------+
  | callable                  | :class:`pyglove.typing.Callable`             |
  +---------------------------+----------------------------------------------+
  | functor                   | :class:`pyglove.typing.Functor`              |
  +---------------------------+----------------------------------------------+
  | type                      | :class:`pyglove.typing.Type`                 |
  +---------------------------+----------------------------------------------+
  | union                     | :class:`pyglove.typing.Union`                |
  +---------------------------+----------------------------------------------+
  | any                       | :class:`pyglove.typing.Any`                  |
  +---------------------------+----------------------------------------------+

  **Construction**

  A value specification is an instance of a ``ValueSpec`` subclass. All
  :class:`pyglove.ValueSpec` subclasses follow a common pattern to construct::

    pg.typing.<ValueSpecClass>(
        [validation-rules],
        [default=<default>],
        [transform=<transform>])

  After creation, a ``ValueSpec`` object can be modified with chaining.
  The code below creates an int specification with default value 1 and can
  accept None::

    pg.typing.Int().noneable().set_default(1)

  **Usage**

  To apply a value specification on an user input to get the accepted value,
  :meth:`pyglove.ValueSpec.apply` shall be used::

    value = pg.typing.Int(min_value=1).apply(4)
    assert value == 4

  **Extension**

  Besides, a ``ValueSpec`` object can extend another ``ValueSpec`` object to
  obtain a more restricted ``ValueSpec`` object. For example::

    pg.typing.Int(min_value=1).extend(pg.typing.Int(max_value=5))

  will end up with::

    pg.typing.Int(min_value=1, max_value=5)

  which will be useful when subclass adds additional restrictions to an
  inherited symbolic attribute from its base class. For some use cases, a
  value spec can be frozen to avoid subclass extensions::

    pg.typing.Int().freeze(1)

  ``ValueSpec`` objects can be created and modified with chaining.
  For example::

    pg.typing.Int().noneable().set_default(1)

  The code above creates an int specification with default value 1 and can
  accept None.

  ``ValueSpec`` object can also be derived from annotations.
  For example, the annotation below::

    @pg.members([
        ('a', pg.typing.List(pg.typing.Str)),
        ('b', pg.typing.Dict().set_default({'key': 'value'})),
        ('c', pg.typing.List(pg.typing.Any()).noneable()),
        ('x', pg.typing.Int()),
        ('y', pg.typing.Str().noneable()),
        ('z', pg.typing.Union(pg.typing.Int(), pg.typing.Float()))
    ])

  can be written as::

    @pg.members([
        ('a', list[str]),
        ('b', {'key': 'value'}),
        ('c', Optional[list]),
        ('x', int),
        ('y', Optional[str]),
        ('z', Union[int, float])
    ])
  """

  # pylint: disable=invalid-name

  # List-type value spec class.
  ListType: Type['ValueSpec']

  # Dict-type value spec class.
  DictType: Type['ValueSpec']

  # Object-type value spec class.
  ObjectType: Type['ValueSpec']

  # pylint: enable=invalid-name

  @property
  @abc.abstractmethod
  def value_type(self) -> Union[
      Type[Any],
      Tuple[Type[Any], ...]]:  # pyformat: disable
    """Returns acceptable (resolved) value type(s)."""

  @property
  @abc.abstractmethod
  def forward_refs(self) -> Set[ForwardRef]:
    """Returns forward references used by the value spec."""

  @abc.abstractmethod
  def noneable(self) -> 'ValueSpec':
    """Marks none-able and returns `self`."""

  @property
  @abc.abstractmethod
  def is_noneable(self) -> bool:
    """Returns True if current value spec accepts None."""

  @abc.abstractmethod
  def set_default(self,
                  default: Any,
                  use_default_apply: bool = True) -> 'ValueSpec':
    """Sets the default value and returns `self`.

    Args:
      default: Default value.
      use_default_apply: If True, invoke `apply` to the value, otherwise use
        default value as is.

    Returns:
      ValueSpec itself.

    Raises:
      ValueError: If default value cannot be applied when use_default_apply
        is set to True.
    """

  @property
  @abc.abstractmethod
  def default(self) -> Any:
    """Returns the default value.

    If no default is provided, MISSING_VALUE will be returned for non-dict
    types. For Dict type, a dict that may contains nested MISSING_VALUE
    will be returned.
    """

  @property
  def has_default(self) -> bool:
    """Returns True if the default value is provided."""
    return self.default != object_utils.MISSING_VALUE

  @abc.abstractmethod
  def freeze(
      self,
      permanent_value: Any = object_utils.MISSING_VALUE,
      apply_before_use: bool = True) -> 'ValueSpec':
    """Sets the default value using a permanent value and freezes current spec.

    A frozen value spec will not accept any value that is not the default
    value. A frozen value spec is useful when a subclass fixes the value of a
    symbolic attribute and want to prevent it from being modified.

    Args:
      permanent_value: A permanent value used for current spec.
        If MISSING_VALUE, freeze the value spec with current default value.
      apply_before_use: If True, invoke `apply` on permanent value
        when permanent_value is provided, otherwise use it as is.

    Returns:
      ValueSpec itself.

    Raises:
      ValueError: If current default value is MISSING_VALUE and the permanent
        value is not specified.
    """

  @property
  @abc.abstractmethod
  def frozen(self) -> bool:
    """Returns True if current value spec is frozen."""

  @property
  @abc.abstractmethod
  def annotation(self) -> Any:
    """Returns PyType annotation. MISSING_VALUE if annotation is absent."""

  @property
  @abc.abstractmethod
  def transform(self) -> Optional[Callable[[Any], Any]]:
    """Returns a transform that will be applied on the input before apply."""

  @abc.abstractmethod
  def is_compatible(self, other: 'ValueSpec') -> bool:
    """Returns True if values acceptable to `other` is acceptable to this spec.

    Args:
      other: Other value spec.

    Returns:
      True if values that is applicable to the other value spec can be applied
      to current spec. Otherwise False.
    """

  @abc.abstractmethod
  def extend(self, base: 'ValueSpec') -> 'ValueSpec':
    """Extends a base spec with current spec's rules.

    Args:
      base: Base ValueSpec to extend.

    Returns:
      ValueSpec itself.

    Raises:
      TypeError: When this value spec cannot extend from base.
    """

  @abc.abstractmethod
  def apply(
      self,
      value: Any,
      allow_partial: bool = False,
      child_transform: Optional[Callable[
          [object_utils.KeyPath, 'Field', Any], Any]] = None,
      root_path: Optional[object_utils.KeyPath] = None,
  ) -> Any:
    """Validates, completes and transforms the input value.

    Here is the procedure of ``apply``::

      (1). Choose the default value if the input value is ``MISSING_VALUE``
      (2). Check whether the input value is None.
        (2.a) Input value is None and ``value_spec.is_noneable()`` is False,
              raises Error.
        (2.b) Input value is not None or ``value_spec.is_noneable()`` is True,
              goto step (3).
      (3). Run ``value_spec.custom_apply`` if the input value is a
           ``CustomTyping`` instance.
        (3.a). If ``value_spec.custom_apply`` returns a value that indicates to
               proceed with standard apply, goto step (4).
        (3.b). Else goto step (6)
      (4). Check the input value type against the ``value_spec.value_type``.
        (4.a). If their value type matches, go to step (5)
        (4.b). Else if there is a converter registered between input value
               type and the value spec's value type, perform the conversion,
               and go to step (5). (see pg.typing.register_converter)
        (4.c) Otherwise raises type mismatch.
      (5). Perform type-specific and user validation and transformation.
           For complex types such as Dict, List, Tuple, call
           `child_spec.apply` recursively on the child fields.
      (6). Perform user transform and returns final value
           (invoked at Field.apply.)

    Args:
      value: Input value to apply.
      allow_partial: If True, partial value is allowed. This is useful for
        container types (dict, list, tuple).
      child_transform: Function to transform child node values into final
        values.
        (NOTE: This transform will not be performed on current value. Instead
        transform on current value is done by Field.apply, which has adequate
        information to call transform with both KeySpec and ValueSpec).
      root_path: Key path of current node.

    Returns:
      Final value:

        * When allow_partial is set to False (default), only input value that
          has no missing values can be applied.
        * When allow_partial is set to True, missing fields will be placeheld
          using MISSING_VALUE.

    Raises:
      KeyError: If additional key is found in value, or required key is
        missing and allow_partial is set to False.
      TypeError: If type of value is not the same as spec required.
      ValueError: If value is not acceptable, or value is MISSING_VALUE while
        allow_partial is set to False.
    """

  @property
  def type_resolved(self) -> bool:
    """Returns True if all forward references are resolved."""
    # Simplified from `not any(not ref.resolved for ...)`: the double
    # negation is equivalent to `all`, which reads directly as the intent.
    return all(ref.resolved for ref in self.forward_refs)

  def __ne__(self, other: Any) -> bool:
    """Operator !=."""
    return not self.__eq__(other)

  @classmethod
  def from_annotation(
      cls,
      annotation: Any,
      auto_typing=False,
      accept_value_as_annotation=False) -> 'ValueSpec':
    """Gets a concrete ValueSpec from annotation.

    NOTE: the real implementation is patched in at import time by
    `annotation_conversion.py`; this stub should never be invoked directly.
    """
    del annotation
    assert False, 'Overridden in `annotation_conversion.py`.'
class Field(object_utils.Formattable, object_utils.JSONConvertible):
  """Class that represents the definition of one or a group of attributes.

  ``Field`` is held by a :class:`pyglove.Schema` object for defining the
  name(s), the validation and transformation rules on its/their value(s) for a
  single symbolic attribute or a set of symbolic attributes.

  A ``Field`` is defined by a tuple of 4 items::

    (key specification, value specification, doc string, field metadata)

  For example::

    (pg.typing.StrKey('foo.*'),
     pg.typing.Int(),
     'Attributes with foo',
     {'user_data': 'bar'})

  The key specification (or ``KeySpec``, class :class:`pyglove.KeySpec`) and
  value specification (or ``ValueSpec``, class :class:`pyglove.ValueSpec`) are
  required, while the doc string and the field metadata are optional. The
  ``KeySpec`` defines acceptable identifiers for this field, and the
  ``ValueSpec`` defines the attribute's value type, its default value,
  validation rules and etc. The doc string serves as the description for the
  field, and the field metadata can be used for attribute-based code
  generation.

  ``Field`` supports extension, which allows the subclass to add more
  restrictions to a field inherited from the base class, or override its
  default value. A field can be frozen if subclasses can no longer extend it.

  See :class:`pyglove.KeySpec` and :class:`pyglove.ValueSpec` for details.
  """

  __serialization_key__ = 'pyglove.typing.Field'

  def __init__(
      self,
      key_spec: Union[KeySpec, str],
      value_spec: ValueSpec,
      description: Optional[str] = None,
      metadata: Optional[Dict[str, Any]] = None):
    """Constructor.

    Args:
      key_spec: Key specification of the field. Can be a string or a KeySpec
        instance.
      value_spec: Value specification of the field.
      description: Description of the field.
      metadata: A dict of objects as metadata for the field.

    Raises:
      ValueError: metadata is not a dict.
    """
    if isinstance(key_spec, str):
      # Strings are a shorthand for const keys; normalize to a KeySpec.
      key_spec = KeySpec.from_str(key_spec)
    assert isinstance(key_spec, KeySpec), key_spec
    self._key = key_spec
    self._value = value_spec
    self._description = description
    if metadata and not isinstance(metadata, dict):
      raise ValueError('metadata must be a dict.')
    self._metadata = metadata or {}

  @classmethod
  def from_annotation(
      cls,
      key: Union[str, KeySpec],
      annotation: Any,
      description: Optional[str] = None,
      metadata: Optional[Dict[str, Any]] = None,
      auto_typing=True) -> 'Field':
    """Gets a Field from annotation.

    NOTE: the real implementation is patched in at import time by
    `annotation_conversion.py`; this stub should never be invoked directly.
    """
    del key, annotation, description, metadata, auto_typing
    assert False, 'Overridden in `annotation_conversion.py`.'

  @property
  def description(self) -> Optional[str]:
    """Description of this field."""
    return self._description

  def set_description(self, description: str) -> None:
    """Sets the description for this field."""
    self._description = description

  @property
  def key(self) -> KeySpec:
    """Key specification of this field."""
    return self._key

  @property
  def value(self) -> ValueSpec:
    """Value specification of this field."""
    return self._value

  @property
  def annotation(self) -> Any:
    """Type annotation for this field."""
    return self._value.annotation

  @property
  def metadata(self) -> Dict[str, Any]:
    """Metadata of this field.

    Metadata is defined as a dict type, so multiple annotations can be
    attached to a field::

      userdata = field.metadata.get('userdata', None)

    Returns:
      Metadata of this field as a dict.
    """
    return self._metadata

  def extend(self, base_field: 'Field') -> 'Field':
    """Extend current field based on a base field.

    The key and value specs are extended in place; the description is
    inherited from the base when absent, and the base's metadata entries are
    merged in with this field's entries taking precedence.

    Args:
      base_field: The base field to extend.

    Returns:
      Field itself.
    """
    self.key.extend(base_field.key)
    self.value.extend(base_field.value)
    if not self._description:
      self._description = base_field.description
    if base_field.metadata:
      # Copy the base metadata first so this field's keys override it.
      metadata = copy.copy(base_field.metadata)
      metadata.update(self.metadata)
      self._metadata = metadata
    return self

  def apply(
      self,
      value: Any,
      allow_partial: bool = False,
      transform_fn: Optional[Callable[
          [object_utils.KeyPath, 'Field', Any], Any]] = None,
      root_path: Optional[object_utils.KeyPath] = None) -> Any:
    """Apply current field to a value, which validate and complete the value.

    Args:
      value: Value to validate against this spec.
      allow_partial: Whether partial value is allowed. This is for dict or
        nested dict values.
      transform_fn: Function to transform applied value into final value.
      root_path: Key path for root.

    Returns:
      final value.
      When allow_partial is set to False (default), only fully qualified value
      is acceptable. When allow_partial is set to True, missing fields will
      be placeheld using MISSING_VALUE.

    Raises:
      KeyError: if additional key is found in value, or required key is
        missing and allow_partial is set to False.
      TypeError: if type of value is not the same as spec required.
      ValueError: if value is not acceptable, or value is MISSING_VALUE while
        allow_partial is set to False.
    """
    value = self._value.apply(
        value,
        allow_partial=allow_partial,
        child_transform=transform_fn,
        root_path=root_path)
    # The field-level transform runs after the value spec has validated and
    # completed the value (see NOTE in `ValueSpec.apply` on `child_transform`).
    if transform_fn:
      value = transform_fn(root_path, self, value)
    return value

  @property
  def default_value(self) -> Any:
    """Returns the default value."""
    return self._value.default

  @property
  def frozen(self) -> bool:
    """Returns True if current field's value is frozen."""
    return self._value.frozen

  def format(
      self,
      compact: bool = False,
      verbose: bool = True,
      root_indent: int = 0,
      *,
      markdown: bool = False,
      **kwargs,
  ) -> str:
    """Format this field into a string."""
    description = self._description
    # In non-verbose mode, truncate long descriptions and metadata.
    if not verbose and self._description and len(self._description) > 20:
      description = self._description[:20] + '...'
    metadata = object_utils.format(
        self._metadata,
        compact=compact,
        verbose=verbose,
        root_indent=root_indent + 1,
        **kwargs)
    if not verbose and len(metadata) > 24:
      metadata = '{...}'
    attr_str = object_utils.kvlist_str([
        ('key', self._key, None),
        ('value', self._value.format(
            compact=compact,
            verbose=verbose,
            root_indent=root_indent + 1,
            **kwargs), None),
        ('description', object_utils.quote_if_str(description), None),
        ('metadata', metadata, '{}')
    ])
    return object_utils.maybe_markdown_quote(f'Field({attr_str})', markdown)

  def to_json(self, **kwargs: Any) -> Dict[str, Any]:
    return self.to_json_dict(
        fields=dict(
            key_spec=(self._key, None),
            value_spec=(self._value, None),
            description=(self._description, None),
            metadata=(self._metadata, {}),
        ),
        exclude_default=True,
        **kwargs,
    )

  def __eq__(self, other: Any) -> bool:
    """Operator==."""
    if self is other:
      return True
    return (isinstance(other, self.__class__) and self.key == other.key and
            self.value == other.value and
            self.description == other.description and
            self.metadata == other.metadata)

  def __ne__(self, other: Any) -> bool:
    """Operator!=."""
    return not self.__eq__(other)
class Schema(object_utils.Formattable, object_utils.JSONConvertible):
"""Class that represents a schema.
PyGlove's runtime type system is based on the concept of ``Schema`` (
class :class:`pyglove.Schema`), which defines what symbolic attributes are
held by a symbolic type (e.g. a symbolic dict, a symbolic list or a symbolic
class) and what values each attribute accepts. A ``Schema`` object consists of
a list of ``Field`` (class :class:`pyglove.Field`), which define the
acceptable keys and their values for these attributes. A ``Schema`` object is
usually created automatically and associated with a symbolic type upon its
declaration, through decorators such as :func:`pyglove.members`,
:func:`pyglove.symbolize` or :func:`pyglove.functor`. For example::
@pg.members([
('x', pg.typing.Int(default=1)),
('y', pg.typing.Float().noneable())
])
class A(pg.Object):
pass
print(A.__schema__)
@pg.symbolize([
('a', pg.typing.Int()),
('b', pg.typing.Float())
])
def foo(a, b):
return a + b
print(foo.__schema__)
Implementation-wise it holds an ordered dictionary of a field key
(:class:`pyglove.KeySpec`) to its field definition (:class:`pyglove.Field`).
The key specification describes what keys/attributes are acceptable for the
field, and value specification within the ``Field`` describes the value type
of the field and their validation rules, default values, and etc.
Symbolic attributes can be inherited during subclassing. Accordingly, the
schema that defines a symbolic class' attributes can be inherited too by its
subclasses. The fields from the bases' schema will be carried over into the
subclasses' schema, while the subclass can override, by redefining that field
with the same key. The subclass cannot override its base classes' field with
arbitrary value specs, it must be overriding non-frozen fields with more
restrictive validation rules of the same type, or change their default values.
See :meth:`pyglove.ValueSpec.extend` for more details.
The code snippet below illustrates schema inheritance during subclassing::
@pg.members([
('x', pg.typing.Int(min_value=1)),
('y', pg.typing.Float()),
])
class A(pg.Object):
pass
@pg.members([
# Further restrict inherited 'x' by specifying the max value, as well
# as providing a default value.
('x', pg.typing.Int(max_value=5, default=2)),
('z', pg.typing.Str('foo').freeze())
])
class B(A):
pass
assert B.schema.fields.keys() == ['x', 'y', 'z']
@pg.members([
# Raises: 'z' is frozen in class B and cannot be extended further.
('z', pg.typing.Str())
])
class C(B):
pass
With a schema, an input dict can be validated and completed by the schema via
:meth:`apply`. If a required field is missing from the input dict, and the
object's `allow_partial` is set to False, a ``KeyError`` will be raised. Otherwise
a partially validated/transformed dict will be returned. Missing values in the
object will be placeheld by :const:`pyglove.MISSING_VALUE`.
"""
__serialization_key__ = 'pyglove.typing.Schema'
def __init__(
self,
fields: List[Field],
name: Optional[str] = None,
base_schema_list: Optional[List['Schema']] = None,
description: Optional[str] = None,
*,
allow_nonconst_keys: bool = False,
metadata: Optional[Dict[str, Any]] = None):
"""Constructor.
Args:
fields: A list of Field as the definition of the schema. The order of the
fields will be preserved.
name: Optional name of this schema. Useful for debugging.
base_schema_list: List of schema used as base. When present, fields
from these schema will be copied to this schema. Fields from the
latter schema will override those from the former ones.
description: Optional str as the description for the schema.
allow_nonconst_keys: Whether immediate fields can use non-const keys.
metadata: Optional dict of user objects as schema-level metadata.
Raises:
TypeError: Argument `fields` is not a list.
KeyError: If a field name contains characters ('.') which is not
allowed, or a field name from `fields` already exists in parent
schema.
ValueError: When failed to create ValueSpec from `fields`.
It could be an unsupported value type, default value doesn't conform
with value specification, etc.
"""
if not isinstance(fields, list):
raise TypeError(
f"Argument 'fields' must be a list. Encountered: {fields}."
)
self._name = name
self._allow_nonconst_keys = allow_nonconst_keys
self._fields = {f.key: f for f in fields}
self._description = description
self._metadata = metadata or {}
self._dynamic_field = None
for f in fields:
if not f.key.is_const:
self._dynamic_field = f
break
if base_schema_list:
base = Schema.merge(base_schema_list)
self.extend(base)
if not allow_nonconst_keys and self._dynamic_field is not None:
raise ValueError(
f'NonConstKey is not allowed in schema. '
f'Encountered \'{self._dynamic_field.key}\'.')
@classmethod
def merge(
cls,
schema_list: Sequence['Schema'],
name: Optional[str] = None,
description: Optional[str] = None
) -> 'Schema':
"""Merge multiple schemas into one.
For fields shared by multiple schemas, the first appeared onces will be
used in the merged schema.
Args:
schema_list: A list of schemas to merge.
name: (Optional) name of the merged schema.
description: (Optinoal) description of the schema.
Returns:
The merged schema.
"""
field_names = set()
fields = []
kw_field = None
for schema in schema_list:
for key, field in schema.fields.items():
if key.is_const and key not in field_names:
fields.append(field)
field_names.add(key)
elif not key.is_const and kw_field is None:
kw_field = field
if kw_field is not None:
fields.append(kw_field)
return Schema(
fields, name=name, description=description, allow_nonconst_keys=True
)
  def extend(self, base: 'Schema') -> 'Schema':
    """Extend current schema based on a base schema.

    Fields only present in the base are deep-copied over; fields present in
    both are extended (child extends parent), so a subclass may only narrow
    an inherited field's spec. Metadata dicts are merged with child entries
    taking precedence. Returns `self`.
    """
    def _merge_field(
        path,
        parent_field: Field,
        child_field: Field) -> Field:
      """Merge function on field with the same key."""
      if parent_field != object_utils.MISSING_VALUE:
        if object_utils.MISSING_VALUE == child_field:
          # Field only exists in base: carry it over, unless it has a
          # non-const key that this schema does not allow.
          if (not self._allow_nonconst_keys and not parent_field.key.is_const):
            hints = object_utils.kvlist_str([
                ('base', object_utils.quote_if_str(base.name), None),
                ('path', path, None)
            ])
            raise ValueError(
                f'Non-const key {parent_field.key} is not allowed to be '
                f'added to the schema. ({hints})')
          return copy.deepcopy(parent_field)
        else:
          # Field exists in both: the child narrows the parent's spec.
          try:
            child_field.extend(parent_field)
          except Exception as e:  # pylint: disable=broad-except
            hints = object_utils.kvlist_str([
                ('base', object_utils.quote_if_str(base.name), None),
                ('path', path, None)
            ])
            # Re-raise the same exception type with location hints appended,
            # preserving the original traceback for debugging.
            raise e.__class__(f'{e} ({hints})').with_traceback(
                sys.exc_info()[2])
      return child_field
    self._fields = object_utils.merge([base.fields, self.fields], _merge_field)
    self._metadata = object_utils.merge([base.metadata, self.metadata])
    # Inherit dynamic field from base if it's not present in the child.
    if self._dynamic_field is None:
      for k, f in self._fields.items():
        if not k.is_const:
          self._dynamic_field = f
          break
    return self
def is_compatible(self, other: 'Schema') -> bool:
"""Returns whether current schema is compatible with the other schema.
NOTE(daiyip): schema A is compatible with schema B when:
schema A and schema B have the same keys, with compatible values specs.
Args:
other: Other schema.
Returns:
True if values that is acceptable to the other schema is acceptable to
current schema.
Raises:
TypeError: If `other` is not a schema object.