This repository has been archived by the owner on Dec 25, 2023. It is now read-only.
/
model.py
4011 lines (3296 loc) · 137 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#
# Copyright 2008 The ndb Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model and Property classes and associated stuff.
A model class represents the structure of entities stored in the
datastore. Applications define model classes to indicate the
structure of their entities, then instantiate those model classes
to create entities.
All model classes must inherit (directly or indirectly) from Model.
Through the magic of metaclasses, straightforward assignments in the
model class definition can be used to declare the model's structure:
class Person(Model):
name = StringProperty()
age = IntegerProperty()
We can now create a Person entity and write it to the datastore:
p = Person(name='Arthur Dent', age=42)
k = p.put()
The return value from put() is a Key (see the documentation for
ndb/key.py), which can be used to retrieve the same entity later:
p2 = k.get()
p2 == p # Returns True
To update an entity, simply change its attributes and write it back
(note that this doesn't change the key):
p2.name = 'Arthur Philip Dent'
p2.put()
We can also delete an entity (by using the key):
k.delete()
The property definitions in the class body tell the system the names
and the types of the fields to be stored in the datastore, whether
they must be indexed, their default value, and more.
Many different Property types exist. Most are indexed by default, the
exceptions indicated in the list below:
- StringProperty: a short text string, limited to 500 bytes
- TextProperty: an unlimited text string; unindexed
- BlobProperty: an unlimited byte string; unindexed
- IntegerProperty: a 64-bit signed integer
- FloatProperty: a double precision floating point number
- BooleanProperty: a bool value
- DateTimeProperty: a datetime object. Note: App Engine always uses
UTC as the timezone
- DateProperty: a date object
- TimeProperty: a time object
- GeoPtProperty: a geographical location, i.e. (latitude, longitude)
- KeyProperty: a datastore Key value, optionally constrained to
referring to a specific kind
- UserProperty: a User object (for backwards compatibility only)
- StructuredProperty: a field that is itself structured like an
entity; see below for more details
- LocalStructuredProperty: like StructuredProperty but the on-disk
representation is an opaque blob; unindexed
- ComputedProperty: a property whose value is computed from other
properties by a user-defined function. The property value is
written to the datastore so that it can be used in queries, but the
value from the datastore is not used when the entity is read back
- GenericProperty: a property whose type is not constrained; mostly
used by the Expando class (see below) but also usable explicitly
- JsonProperty: a property whose value is any object that can be
serialized using JSON; the value written to the datastore is a JSON
representation of that object
- PickleProperty: a property whose value is any object that can be
serialized using Python's pickle protocol; the value written to the
datastore is the pickled representation of that object, using the
highest available pickle protocol
Most Property classes have similar constructor signatures. They
accept several optional keyword arguments:
- name=<string>: the name used to store the property value in the
datastore. Unlike the following options, this may also be given as
a positional argument
- indexed=<bool>: indicates whether the property should be indexed
(allowing queries on this property's value)
- repeated=<bool>: indicates that this property can have multiple
values in the same entity.
- write_empty_list<bool>: For repeated value properties, controls
whether properties with no elements (the empty list) is
written to Datastore. If true, written, if false, then nothing
is written to Datastore.
- required=<bool>: indicates that this property must be given a value
- default=<value>: a default value if no explicit value is given
- choices=<list of values>: a list or tuple of allowable values
- validator=<function>: a general-purpose validation function. It
will be called with two arguments (prop, value) and should either
return the validated value or raise an exception. It is also
allowed for the function to modify the value, but calling it again
on the modified value should not modify the value further. (For
example: a validator that returns value.strip() or value.lower() is
fine, but one that returns value + '$' is not.)
- verbose_name=<value>: A human readable name for this property. This
human readable name can be used for html form labels.
The repeated and required/default options are mutually exclusive: a
repeated property cannot be required nor can it specify a default
value (the default is always an empty list and an empty list is always
an allowed value), but a required property can have a default.
Some property types have additional arguments. Some property types
do not support all options.
Repeated properties are always represented as Python lists; if there
is only one value, the list has only one element. When a new list is
assigned to a repeated property, all elements of the list are
validated. Since it is also possible to mutate lists in place,
repeated properties are re-validated before they are written to the
datastore.
No validation happens when an entity is read from the datastore;
however property values read that have the wrong type (e.g. a string
value for an IntegerProperty) are ignored.
For non-repeated properties, None is always a possible value, and no
validation is called when the value is set to None. However for
required properties, writing the entity to the datastore requires
the value to be something other than None (and valid).
The StructuredProperty is different from most other properties; it
lets you define a sub-structure for your entities. The substructure
itself is defined using a model class, and the attribute value is an
instance of that model class. However it is not stored in the
datastore as a separate entity; instead, its attribute values are
included in the parent entity using a naming convention (the name of
the structured attribute followed by a dot followed by the name of the
subattribute). For example:
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = StructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
k = p.put()
This would write a single 'Person' entity with three attributes (as
you could verify using the Datastore Viewer in the Admin Console):
name = 'Harry Potter'
address.street = '4 Privet Drive'
address.city = 'Little Whinging'
Structured property types can be nested arbitrarily deep, but in a
hierarchy of nested structured property types, only one level can have
the repeated flag set. It is fine to have multiple structured
properties referencing the same model class.
It is also fine to use the same model class both as a top-level entity
class and as for a structured property; however queries for the model
class will only return the top-level entities.
The LocalStructuredProperty works similar to StructuredProperty on the
Python side. For example:
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = LocalStructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
k = p.put()
However the data written to the datastore is different; it writes a
'Person' entity with a 'name' attribute as before and a single
'address' attribute whose value is a blob which encodes the Address
value (using the standard "protocol buffer" encoding).
Sometimes the set of properties is not known ahead of time. In such
cases you can use the Expando class. This is a Model subclass that
creates properties on the fly, both upon assignment and when loading
an entity from the datastore. For example:
class SuperPerson(Expando):
name = StringProperty()
superpower = StringProperty()
razorgirl = SuperPerson(name='Molly Millions',
superpower='bionic eyes, razorblade hands',
rasta_name='Steppin\' Razor',
alt_name='Sally Shears')
elastigirl = SuperPerson(name='Helen Parr',
superpower='stretchable body')
elastigirl.max_stretch = 30 # Meters
You can inspect the properties of an expando instance using the
_properties attribute:
>>> print razorgirl._properties.keys()
['rasta_name', 'name', 'superpower', 'alt_name']
>>> print elastigirl._properties
{'max_stretch': GenericProperty('max_stretch'),
'name': StringProperty('name'),
'superpower': StringProperty('superpower')}
Note: this property exists for plain Model instances too; it is just
not as interesting for those.
The Model class offers basic query support. You can create a Query
object by calling the query() class method. Iterating over a Query
object returns the entities matching the query one at a time.
Query objects are fully described in the docstring for query.py, but
there is one handy shortcut that is only available through
Model.query(): positional arguments are interpreted as filter
expressions which are combined through an AND operator. For example:
Person.query(Person.name == 'Harry Potter', Person.age >= 11)
is equivalent to:
Person.query().filter(Person.name == 'Harry Potter', Person.age >= 11)
Keyword arguments passed to .query() are passed along to the Query()
constructor.
It is possible to query for field values of structured properties. For
example:
qry = Person.query(Person.address.city == 'London')
A number of top-level functions also live in this module:
- transaction() runs a function inside a transaction
- get_multi() reads multiple entities at once
- put_multi() writes multiple entities at once
- delete_multi() deletes multiple entities at once
All these have a corresponding *_async() variant as well.
The *_multi_async() functions return a list of Futures.
And finally these (without async variants):
- in_transaction() tests whether you are currently running in a transaction
- @transactional decorates functions that should be run in a transaction
There are many other interesting features. For example, Model
subclasses may define pre-call and post-call hooks for most operations
(get, put, delete, allocate_ids), and Property classes may be
subclassed to suit various needs. Documentation for writing a
Property subclass is in the docstring for the Property class.
"""
__author__ = 'guido@google.com (Guido van Rossum)'
import collections
import copy
import cPickle as pickle
import datetime
import logging
import zlib
from .google_imports import datastore
from .google_imports import datastore_errors
from .google_imports import datastore_query
from .google_imports import datastore_rpc
from .google_imports import datastore_types
from .google_imports import users
from .google_imports import entity_pb
from . import key as key_module # NOTE: 'key' is a common local variable name.
from . import utils
Key = key_module.Key  # For export.

# NOTE: Property and Error classes are added later.
__all__ = ['Key', 'BlobKey', 'GeoPt', 'Rollback',
           'Index', 'IndexState', 'IndexProperty',
           'ModelAdapter', 'ModelAttribute',
           'ModelKey', 'MetaModel', 'Model', 'Expando',
           'transaction', 'transaction_async', 'in_transaction',
           'transactional', 'transactional_async', 'transactional_tasklet',
           'non_transactional',
           'get_multi', 'get_multi_async',
           'put_multi', 'put_multi_async',
           'delete_multi', 'delete_multi_async',
           'get_indexes', 'get_indexes_async',
           'make_connection',
           ]

# Re-export commonly used datastore types and exceptions under short names.
BlobKey = datastore_types.BlobKey
GeoPt = datastore_types.GeoPt
Rollback = datastore_errors.Rollback
class KindError(datastore_errors.BadValueError):
  """Raised when no implementation (model class) can be found for a kind.

  Also raised when the kind is not an 8-bit string.
  """
class InvalidPropertyError(datastore_errors.Error):
  """Raised when a property is not applicable to a given use.

  For example, a property must exist and be indexed to be used in a
  query's projection or group-by clause.
  """


# Legacy alias, kept so existing code catching BadProjectionError keeps
# working.
BadProjectionError = InvalidPropertyError
class UnprojectedPropertyError(datastore_errors.Error):
  """Raised when reading a property value that is not in the projection."""
class ReadonlyPropertyError(datastore_errors.Error):
  """Raised when attempting to set a property value that is read-only."""
class ComputedPropertyError(ReadonlyPropertyError):
  """Raised when attempting to set or delete a computed property's value."""
# Various imported limits.
_MAX_LONG = key_module._MAX_LONG  # Largest legal datastore integer value.
_MAX_STRING_LENGTH = datastore_types._MAX_STRING_LENGTH  # Indexed string cap.

# Map index directions to human-readable strings.
_DIR_MAP = {
    entity_pb.Index_Property.ASCENDING: 'asc',
    entity_pb.Index_Property.DESCENDING: 'desc',
}

# Map index states to human-readable strings.
_STATE_MAP = {
    entity_pb.CompositeIndex.ERROR: 'error',
    entity_pb.CompositeIndex.DELETED: 'deleting',
    entity_pb.CompositeIndex.READ_WRITE: 'serving',
    entity_pb.CompositeIndex.WRITE_ONLY: 'building',
}
class _NotEqualMixin(object):
"""Mix-in class that implements __ne__ in terms of __eq__."""
def __ne__(self, other):
"""Implement self != other as not(self == other)."""
eq = self.__eq__(other)
if eq is NotImplemented:
return NotImplemented
return not eq
class _NestedCounter(object):
""" A recursive counter for StructuredProperty deserialization.
Deserialization has some complicated rules to handle StructuredPropertys
that may or may not be empty. The simplest case is a leaf counter, where
the counter will return the index of the repeated value that last had this
leaf property written. When a non-leaf counter requested, this will return
the max of all its leaf values. This is due to the fact that the next index
that a full non-leaf property may be written to comes after all indices that
have part of that property written (otherwise, a partial entity would be
overwritten.
Consider an evaluation of the following structure:
class B(model.Model):
c = model.IntegerProperty()
d = model.IntegerProperty()
class A(model.Model):
b = model.StructuredProperty(B)
class Foo(model.Model):
# top-level model
a = model.StructuredProperty(A, repeated=True)
Foo(a=[A(b=None),
A(b=B(c=1)),
A(b=None),
A(b=B(c=2, d=3))])
This will result in a serialized structure:
1) a.b = None
2) a.b.c = 1
3) a.b.d = None
4) a.b = None
5) a.b.c = 2
6) a.b.d = 3
The counter state should be the following:
a | a.b | a.b.c | a.b.d
0) - - - -
1) @1 1 - -
2) @2 @2 2 -
3) @2 @2 2 2
4) @3 @3 3 3
5) @4 @4 4 3
6) @4 @4 4 4
Here, @ indicates that this counter value is actually a calculated value.
It is equal to the MAX of its sub-counters.
Counter values may get incremented multiple times while deserializing a
property. This will happen if a child counter falls behind,
for example in steps 2 and 3.
During an increment of a parent node, all child nodes values are incremented
to match that of the parent, for example in step 4.
"""
def __init__(self):
self.__counter = 0
self.__sub_counters = collections.defaultdict(_NestedCounter)
def get(self, parts=None):
if parts:
return self.__sub_counters[parts[0]].get(parts[1:])
if self.__is_parent_node():
return max(v.get() for v in self.__sub_counters.itervalues())
return self.__counter
def increment(self, parts=None):
if parts:
self.__make_parent_node()
return self.__sub_counters[parts[0]].increment(parts[1:])
if self.__is_parent_node():
# Move all children forward
value = self.get() + 1
self._set(value)
return value
self.__counter += 1
return self.__counter
def _set(self, value):
"""Updates all descendants to a specified value."""
if self.__is_parent_node():
for child in self.__sub_counters.itervalues():
child._set(value)
else:
self.__counter = value
def _absolute_counter(self):
# Used only for testing.
return self.__counter
def __is_parent_node(self):
return self.__counter == -1
def __make_parent_node(self):
self.__counter = -1
class IndexProperty(_NotEqualMixin):
  """Immutable object representing a single property in an index."""

  @utils.positional(1)
  def __new__(cls, name, direction):
    """Constructor."""
    self = object.__new__(cls)
    self.__name = name
    self.__direction = direction
    return self

  @property
  def name(self):
    """The property name being indexed, a string."""
    return self.__name

  @property
  def direction(self):
    """The direction in the index for this property, 'asc' or 'desc'."""
    return self.__direction

  def __repr__(self):
    """Return a string representation."""
    return '%s(name=%r, direction=%r)' % (
        self.__class__.__name__, self.name, self.direction)

  def __eq__(self, other):
    """Compare two index properties for equality."""
    if isinstance(other, IndexProperty):
      return self.name == other.name and self.direction == other.direction
    return NotImplemented

  def __hash__(self):
    return hash((self.name, self.direction))
class Index(_NotEqualMixin):
  """Immutable object representing an index."""

  @utils.positional(1)
  def __new__(cls, kind, properties, ancestor):
    """Constructor."""
    self = object.__new__(cls)
    self.__kind = kind
    self.__properties = properties
    self.__ancestor = ancestor
    return self

  @property
  def kind(self):
    """The kind being indexed, a string."""
    return self.__kind

  @property
  def properties(self):
    """A list of PropertyIndex objects giving the properties being indexed."""
    return self.__properties

  @property
  def ancestor(self):
    """Whether this is an ancestor index, a bool."""
    return self.__ancestor

  def __repr__(self):
    """Return a string representation."""
    details = ['kind=%r' % self.kind,
               'properties=%r' % self.properties,
               'ancestor=%s' % self.ancestor]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(details))

  def __eq__(self, other):
    """Compare two indexes."""
    if not isinstance(other, Index):
      return NotImplemented
    return (self.kind == other.kind and
            self.properties == other.properties and
            self.ancestor == other.ancestor)

  def __hash__(self):
    return hash((self.kind, self.properties, self.ancestor))
class IndexState(_NotEqualMixin):
  """Immutable object representing an index and its state."""

  @utils.positional(1)
  def __new__(cls, definition, state, id):
    """Constructor."""
    self = object.__new__(cls)
    self.__definition = definition
    self.__state = state
    self.__id = id
    return self

  @property
  def definition(self):
    """An Index object describing the index."""
    return self.__definition

  @property
  def state(self):
    """The index state, a string.

    Possible values are 'error', 'deleting', 'serving' or 'building'.
    """
    return self.__state

  @property
  def id(self):
    """The index ID, an integer."""
    return self.__id

  def __repr__(self):
    """Return a string representation."""
    details = ['definition=%r' % self.definition,
               'state=%r' % self.state,
               'id=%d' % self.id]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(details))

  def __eq__(self, other):
    """Compare two index states."""
    if not isinstance(other, IndexState):
      return NotImplemented
    return (self.definition == other.definition and
            self.state == other.state and
            self.id == other.id)

  def __hash__(self):
    return hash((self.definition, self.state, self.id))
class ModelAdapter(datastore_rpc.AbstractAdapter):
  """Conversions between 'our' Key and Model classes and protobufs.

  This is needed to construct a Connection object, which in turn is
  needed to construct a Context object.

  See the base class docstring for more info about the signatures.
  """

  def __init__(self, default_model=None, id_resolver=None):
    """Constructor.

    Args:
      default_model: If an implementation for the kind cannot be found, use
        this model class.  If none is specified, an exception will be thrown
        (default).
      id_resolver: A datastore_pbs.IdResolver that can resolve
        application ids.  This is only necessary when running on the Cloud
        Datastore v1 API.
    """
    # TODO(pcostello): Remove this once AbstractAdapter's constructor makes
    # it into production.
    try:
      super(ModelAdapter, self).__init__(id_resolver)
    except Exception:
      # Older AbstractAdapter versions don't accept id_resolver; fall back
      # to an un-initialized base.  (Was a bare 'except:', which would also
      # have swallowed KeyboardInterrupt and SystemExit.)
      pass
    self.default_model = default_model
    self.want_pbs = 0

  # Make this a context manager to request setting _orig_pb.
  # Used in query.py by _MultiQuery.run_to_queue().

  def __enter__(self):
    self.want_pbs += 1

  def __exit__(self, *unused_args):
    self.want_pbs -= 1

  def pb_to_key(self, pb):
    """Convert a protobuf Reference to a Key."""
    return Key(reference=pb)

  def key_to_pb(self, key):
    """Convert a Key to a protobuf Reference."""
    return key.reference()

  def pb_to_entity(self, pb):
    """Convert an entity protobuf to a Model instance of the right kind.

    Falls back to self.default_model when no model class is registered for
    the kind (Model._lookup_model handles that).
    """
    key = None
    kind = None
    if pb.key().path().element_size():
      key = Key(reference=pb.key())
      kind = key.kind()
    modelclass = Model._lookup_model(kind, self.default_model)
    entity = modelclass._from_pb(pb, key=key, set_key=False)
    if self.want_pbs:
      # Inside a 'with adapter:' block, keep the original protobuf around.
      entity._orig_pb = pb
    return entity

  def entity_to_pb(self, ent):
    """Convert a Model instance to an entity protobuf."""
    pb = ent._to_pb()
    return pb

  def pb_to_index(self, pb):
    """Convert a CompositeIndex protobuf to an IndexState object."""
    index_def = pb.definition()
    properties = [IndexProperty(name=prop.name(),
                                direction=_DIR_MAP[prop.direction()])
                  for prop in index_def.property_list()]
    index = Index(kind=index_def.entity_type(),
                  properties=properties,
                  ancestor=bool(index_def.ancestor()),
                  )
    index_state = IndexState(definition=index,
                             state=_STATE_MAP[pb.state()],
                             id=pb.id(),
                             )
    return index_state
def make_connection(config=None, default_model=None,
                    _api_version=datastore_rpc._DATASTORE_V3,
                    _id_resolver=None):
  """Create a new Connection object with the right adapter.

  Optionally you can pass in a datastore_rpc.Configuration object.
  """
  adapter = ModelAdapter(default_model, id_resolver=_id_resolver)
  return datastore_rpc.Connection(adapter=adapter,
                                  config=config,
                                  _api_version=_api_version)
class ModelAttribute(object):
  """A base class signifying the presence of a _fix_up() method.

  The default implementation does nothing; subclasses override it.
  """

  def _fix_up(self, cls, code_name):
    # No fixing up needed by default.
    pass
class _BaseValue(_NotEqualMixin):
  """A marker object wrapping a 'base type' value.

  This makes it possible to tell whether ent._values[name] holds a user
  value (a type the Python code understands) or a base value (a type the
  serialization layer understands).  User values are stored unwrapped;
  base values are stored wrapped in a _BaseValue instance.
  """

  __slots__ = ['b_val']

  def __init__(self, b_val):
    """Constructor.  Argument is the base value to be wrapped."""
    # None and lists are never legal base values.
    assert b_val is not None, "Cannot wrap None"
    assert not isinstance(b_val, list), repr(b_val)
    self.b_val = b_val

  def __repr__(self):
    return '_BaseValue(%r)' % (self.b_val,)

  def __eq__(self, other):
    if isinstance(other, _BaseValue):
      return self.b_val == other.b_val
    return NotImplemented

  def __hash__(self):
    raise TypeError('_BaseValue is not immutable')
class Property(ModelAttribute):
  """A class describing a typed, persisted attribute of a datastore entity.

  Not to be confused with Python's 'property' built-in.

  This is just a base class; there are specific subclasses that
  describe Properties of various types (and GenericProperty which
  describes a dynamically typed Property).

  All special Property attributes, even those considered 'public',
  have names starting with an underscore, because StructuredProperty
  uses the non-underscore attribute namespace to refer to nested
  Property names; this is essential for specifying queries on
  subproperties (see the module docstring).

  The Property class and its predefined subclasses allow easy
  subclassing using composable (or stackable) validation and
  conversion APIs.  These require some terminology definitions:

  - A 'user value' is a value such as would be set and accessed by the
    application code using standard attributes on the entity.

  - A 'base value' is a value such as would be serialized to
    and deserialized from the datastore.

  The values stored in ent._values[name] and accessed by
  _store_value() and _retrieve_value() can be either user values or
  base values.  To retrieve user values, use _get_user_value().  To
  retrieve base values, use _get_base_value().  In particular,
  _get_value() calls _get_user_value(), and _serialize() effectively
  calls _get_base_value().

  To store a user value, just call _store_value().  To store a
  base value, wrap the value in a _BaseValue() and then
  call _store_value().

  A Property subclass that wants to implement a specific
  transformation between user values and serializable values should
  implement two methods, _to_base_type() and _from_base_type().
  These should *NOT* call their super() method; super calls are taken
  care of by _call_to_base_type() and _call_from_base_type().
  This is what is meant by composable (or stackable) APIs.

  The API supports 'stacking' classes with ever more sophisticated
  user<-->base conversions: the user-->base conversion
  goes from more sophisticated to less sophisticated, while the
  base-->user conversion goes from less sophisticated to more
  sophisticated.  For example, see the relationship between
  BlobProperty, TextProperty and StringProperty.

  In addition to _to_base_type() and _from_base_type(), the
  _validate() method is also a composable API.

  The validation API distinguishes between 'lax' and 'strict' user
  values.  The set of lax values is a superset of the set of strict
  values.  The _validate() method takes a lax value and if necessary
  converts it to a strict value.  This means that when setting the
  property value, lax values are accepted, while when getting the
  property value, only strict values will be returned.  If no
  conversion is needed, _validate() may return None.  If the argument
  is outside the set of accepted lax values, _validate() should raise
  an exception, preferably TypeError or
  datastore_errors.BadValueError.

  Example/boilerplate:

    def _validate(self, value):
      'Lax user value to strict user value.'
      if not isinstance(value, <top type>):
        raise TypeError(...)  # Or datastore_errors.BadValueError(...).

    def _to_base_type(self, value):
      '(Strict) user value to base value.'
      if isinstance(value, <user type>):
        return <base type>(value)

    def _from_base_type(self, value):
      'base value to (strict) user value.'
      if not isinstance(value, <base type>):
        return <user type>(value)

  Things that _validate(), _to_base_type() and _from_base_type()
  do *not* need to handle:

  - None: They will not be called with None (and if they return None,
    this means that the value does not need conversion).

  - Repeated values: The infrastructure (_get_user_value() and
    _get_base_value()) takes care of calling
    _from_base_type() or _to_base_type() for each list item in a
    repeated value.

  - Wrapping values in _BaseValue(): The wrapping and unwrapping is
    taken care of by the infrastructure that calls the composable APIs.

  - Comparisons: The comparison operations call _to_base_type() on
    their operand.

  - Distinguishing between user and base values: the
    infrastructure guarantees that _from_base_type() will be called
    with an (unwrapped) base value, and that
    _to_base_type() will be called with a user value.

  - Returning the original value: if any of these return None, the
    original value is kept.  (Returning a different value not equal to
    None will substitute the different value.)
  """
  # TODO: Separate 'simple' properties from base Property class

  # Class-level defaults; __init__ only overrides these when the
  # corresponding argument is not None.
  _code_name = None
  _name = None
  _indexed = True
  _repeated = False
  _required = False
  _default = None
  _choices = None
  _validator = None
  _verbose_name = None
  _write_empty_list = False

  # Global counter giving each Property instance a unique creation order
  # (bumped in __init__).
  __creation_counter_global = 0

  # Attributes rendered by __repr__ when they differ from the class default.
  _attributes = ['_name', '_indexed', '_repeated', '_required', '_default',
                 '_choices', '_validator', '_verbose_name',
                 '_write_empty_list']
  _positional = 1  # Only name is a positional argument.
@utils.positional(1 + _positional)  # Add 1 for self.
def __init__(self, name=None, indexed=None, repeated=None,
             required=None, default=None, choices=None, validator=None,
             verbose_name=None, write_empty_list=None):
  """Constructor.  For arguments see the module docstring.

  Only arguments that are not None override the class-level defaults,
  so subclasses can preset options via class attributes.

  Raises:
    TypeError: If name is not a string, choices is not a
      list/tuple/set/frozenset, or validator is not callable.
    ValueError: If name contains a period, or if repeated is combined
      with required or default.
  """
  if name is not None:
    if isinstance(name, unicode):
      # Python 2: normalize unicode names to utf-8 byte strings.
      name = name.encode('utf-8')
    if not isinstance(name, str):
      raise TypeError('Name %r is not a string' % (name,))
    if '.' in name:
      # Periods are reserved for StructuredProperty sub-paths.
      raise ValueError('Name %r cannot contain period characters' % (name,))
    self._name = name
  if indexed is not None:
    self._indexed = indexed
  if repeated is not None:
    self._repeated = repeated
  if required is not None:
    self._required = required
  if default is not None:
    # TODO: Call _validate() on default?
    self._default = default
  if verbose_name is not None:
    self._verbose_name = verbose_name
  if write_empty_list is not None:
    self._write_empty_list = write_empty_list
  if self._repeated and (self._required or self._default is not None):
    raise ValueError('repeated is incompatible with required or default')
  if choices is not None:
    if not isinstance(choices, (list, tuple, set, frozenset)):
      raise TypeError('choices must be a list, tuple or set; received %r' %
                      choices)
    # TODO: Call _validate() on each choice?
    self._choices = frozenset(choices)
  if validator is not None:
    # The validator is called as follows:
    #   value = validator(prop, value)
    # It should return the value to be used, or raise an exception.
    # It should be idempotent, i.e. calling it a second time should
    # not further modify the value.  So a validator that returns e.g.
    # value.lower() or value.strip() is fine, but one that returns
    # value + '$' is not.
    if not hasattr(validator, '__call__'):
      raise TypeError('validator must be callable or None; received %r' %
                      validator)
    self._validator = validator
  # Keep a unique creation counter.
  Property.__creation_counter_global += 1
  self._creation_counter = Property.__creation_counter_global
def __repr__(self):
  """Return a compact unambiguous string representation of a property."""
  cls = self.__class__
  pieces = []
  for index, attr in enumerate(self._attributes):
    value = getattr(self, attr)
    if value is getattr(cls, attr):
      continue  # Still the class-level default; omit it.
    rendered = value.__name__ if isinstance(value, type) else repr(value)
    if index >= cls._positional:
      # Non-positional options are shown as keyword arguments, without
      # the leading underscore.
      keyword = attr[1:] if attr.startswith('_') else attr
      rendered = '%s=%s' % (keyword, rendered)
    pieces.append(rendered)
  return '%s(%s)' % (cls.__name__, ', '.join(pieces))
def _datastore_type(self, value):
  """Internal hook used by property filters.

  Sometimes the low-level query interface needs a specific data type
  in order for the right filter to be constructed.  See _comparison().
  The base implementation is the identity function; subclasses may
  override.
  """
  return value
def _comparison(self, op, value):
  """Internal helper for comparison operators.

  Args:
    op: The operator ('=', '<' etc.).
    value: The value to compare against; validated and converted to the
      base (datastore) representation unless it is None.

  Returns:
    A FilterNode instance representing the requested comparison.

  Raises:
    BadFilterError: If this property is not indexed (unindexed
      properties cannot be queried).
  """
  # NOTE: This is also used by query.gql().
  if not self._indexed:
    raise datastore_errors.BadFilterError(
        'Cannot query for unindexed property %s' % self._name)
  from .query import FilterNode  # Import late to avoid circular imports.
  if value is not None:
    value = self._do_validate(value)
    value = self._call_to_base_type(value)
    value = self._datastore_type(value)
  return FilterNode(self._name, op, value)
# Comparison operators on Property instances don't compare the
# properties; instead they return FilterNode instances that can be
# used in queries.  See the module docstrings above and in query.py
# for details on how these can be used.

def __eq__(self, value):
  """Return a FilterNode instance representing the '=' comparison."""
  return self._comparison('=', value)