-
Notifications
You must be signed in to change notification settings - Fork 241
/
Copy pathcore.py
6488 lines (6097 loc) · 299 KB
/
core.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
from __future__ import absolute_import, print_function, division
from pony.py23compat import cmp, unicode, buffer, int_types
import builtins, json, re, sys, types, datetime, logging, itertools, warnings, inspect, ast
from operator import attrgetter, itemgetter
from itertools import chain, starmap, repeat
from time import time
from decimal import Decimal
from random import shuffle, randint, random
from threading import Lock, RLock, current_thread, _MainThread
from contextlib import contextmanager
from collections import defaultdict
from hashlib import md5
from inspect import isgeneratorfunction
from functools import wraps
import pony
from pony import options
from pony.orm.decompiling import decompile
from pony.orm.ormtypes import (
LongStr, LongUnicode, numeric_types, raw_sql, RawSQL, normalize, Json, TrackedValue, QueryType,
Array, IntArray, StrArray, FloatArray
)
from pony.orm.asttranslation import ast2src, create_extractors, TranslationError
from pony.orm.dbapiprovider import (
DBAPIProvider, DBException, Warning, Error, InterfaceError, DatabaseError, DataError,
OperationalError, IntegrityError, InternalError, ProgrammingError, NotSupportedError
)
from pony import utils
from pony.utils import localbase, decorator, cut_traceback, cut_traceback_depth, throw, reraise, truncate_repr, \
get_lambda_args, pickle_ast, unpickle_ast, deprecated, import_module, parse_expr, is_ident, tostring, strjoin, \
between, concat, coalesce, HashableDict, deref_proxy, deduplicate
# Public API of pony.orm.core: exceptions, the Database class, entity attribute
# kinds, query functions and permission helpers re-exported by `from pony.orm import *`.
__all__ = [
    'pony',
    'DBException', 'RowNotFound', 'MultipleRowsFound', 'TooManyRowsFound',
    'Warning', 'Error', 'InterfaceError', 'DatabaseError', 'DataError', 'OperationalError',
    'IntegrityError', 'InternalError', 'ProgrammingError', 'NotSupportedError',
    'OrmError', 'ERDiagramError', 'DBSchemaError', 'MappingError', 'BindingError',
    'TableDoesNotExist', 'TableIsNotEmpty', 'ConstraintError', 'CacheIndexError',
    'ObjectNotFound', 'MultipleObjectsFoundError', 'TooManyObjectsFoundError', 'OperationWithDeletedObjectError',
    'TransactionError', 'ConnectionClosedError', 'TransactionIntegrityError', 'IsolationError',
    'CommitException', 'RollbackException', 'UnrepeatableReadError', 'OptimisticCheckError',
    'UnresolvableCyclicDependency', 'UnexpectedError', 'DatabaseSessionIsOver',
    'PonyRuntimeWarning', 'DatabaseContainsIncorrectValue', 'DatabaseContainsIncorrectEmptyValue',
    'TranslationError', 'ExprEvalError', 'PermissionError',
    'Database', 'sql_debug', 'set_sql_debug', 'sql_debugging', 'show',
    'PrimaryKey', 'Required', 'Optional', 'Set', 'Discriminator',
    'composite_key', 'composite_index',
    'flush', 'commit', 'rollback', 'db_session', 'with_transaction', 'make_proxy',
    'LongStr', 'LongUnicode', 'Json', 'IntArray', 'StrArray', 'FloatArray',
    'select', 'left_join', 'get', 'exists', 'delete',
    'count', 'sum', 'min', 'max', 'avg', 'group_concat', 'distinct',
    'JOIN', 'desc', 'between', 'concat', 'coalesce', 'raw_sql',
    'buffer', 'unicode',
    'get_current_user', 'set_current_user', 'perm', 'has_perm',
    'get_user_groups', 'get_user_roles', 'get_object_labels',
    'user_groups_getter', 'user_roles_getter', 'obj_labels_getter'
]
# When True, sql_debug()/set_sql_debug() calls become no-ops (used by tests/tools).
suppress_debug_change = False

def sql_debug(value):
    """Turn SQL statement logging on or off for the current thread.

    Deprecated shim kept for backward compatibility; prefer set_sql_debug().
    """
    # todo: make sql_debug deprecated
    if suppress_debug_change:
        return
    local.debug = value
def set_sql_debug(debug=True, show_values=None):
    """Enable/disable SQL logging and, optionally, logging of query parameter values.

    Both settings are thread-local; ignored entirely while suppress_debug_change is set.
    """
    if suppress_debug_change:
        return
    local.debug = debug
    local.show_values = show_values
# Loggers for general ORM diagnostics and for executed SQL statements respectively.
orm_logger = logging.getLogger('pony.orm')
sql_logger = logging.getLogger('pony.orm.sql')
# Level used for all ORM log records emitted through the loggers above.
orm_log_level = logging.INFO
def log_orm(msg):
    """Emit an ORM diagnostic message via the pony.orm logger, falling back to stdout
    when no logging handler is configured."""
    if not orm_logger.hasHandlers():
        print(msg)
    else:
        orm_logger.log(orm_log_level, msg)
def log_sql(sql, arguments=None):
    """Log an executed SQL statement, via the pony.orm.sql logger if it has handlers,
    otherwise to stdout. A list of argument tuples marks an executemany() call.
    """
    if type(arguments) is list:  # executemany() — one argument row per execution
        sql = 'EXECUTEMANY (%d)\n%s' % (len(arguments), sql)
    has_handlers = sql_logger.hasHandlers()
    # With a configured logger, values are shown only when explicitly enabled;
    # on plain stdout they are shown unless explicitly disabled (None means default-on).
    if has_handlers:
        show = local.show_values
    else:
        show = local.show_values is None or local.show_values
    if show and arguments:
        sql = '%s\n%s' % (sql, format_arguments(arguments))
    if has_handlers:
        sql_logger.log(orm_log_level, sql)
    else:
        print(sql, end='\n\n')
def format_arguments(arguments):
    """Render query arguments for logging: one line per argument row for
    executemany() lists, a single line otherwise."""
    if type(arguments) is list:
        return '\n'.join(args2str(args) for args in arguments)
    return args2str(arguments)
def args2str(args):
    """Render one set of query arguments as a short human-readable string.

    Positional arguments (tuple/list) become '[a, b, ...]', named arguments
    (dict) become '{key:value, ...}' with keys sorted for stable output.
    Fixed: previously any other argument type fell off the end of the function
    and produced None (printed literally as 'None' in the SQL log); now such
    values are rendered with repr() instead.
    """
    if isinstance(args, (tuple, list)):
        return '[%s]' % ', '.join(map(repr, args))
    elif isinstance(args, dict):
        return '{%s}' % ', '.join('%s:%s' % (repr(key), repr(val)) for key, val in sorted(args.items()))
    return repr(args)  # fallback for scalar/unknown argument containers
# Cache of adapt_sql() results, keyed by (sql, paramstyle).
adapted_sql_cache = {}
# Cache of parsed AST for query source strings (filled elsewhere in this module).
string2ast_cache = {}
class OrmError(Exception): pass  # root of the Pony ORM exception hierarchy
class ERDiagramError(OrmError): pass  # invalid entity-relationship diagram definition
class DBSchemaError(OrmError): pass  # database schema generation/validation problem
class MappingError(OrmError): pass  # entity-to-table mapping problem
class BindingError(OrmError): pass  # Database.bind() misuse (wrong/duplicate provider, etc.)
class TableDoesNotExist(OrmError): pass  # expected table is missing in the database
class TableIsNotEmpty(OrmError): pass  # destructive operation refused on non-empty table
class ConstraintError(OrmError): pass  # declared constraint violated
class CacheIndexError(OrmError): pass  # uniqueness violated inside the session cache
class RowNotFound(OrmError): pass  # raw-SQL get(): no rows returned
class MultipleRowsFound(OrmError): pass  # raw-SQL get(): more than one row returned
class TooManyRowsFound(OrmError): pass  # raw-SQL select(): MAX_FETCH_COUNT exceeded
class PermissionError(OrmError): pass  # permission check failed (shadows builtin PermissionError on purpose)
class ObjectNotFound(OrmError):
    """Raised when an entity instance with the given primary key does not exist.

    The message is formatted as 'Entity[pk]'; composite keys are shown
    comma-separated. `entity` and `pkval` are kept on the exception instance.
    """
    def __init__(exc, entity, pkval=None):
        if pkval is None:
            msg = entity.__name__
        else:
            if type(pkval) is tuple:
                pkval = ','.join(map(repr, pkval))
            else:
                pkval = repr(pkval)
            msg = '%s[%s]' % (entity.__name__, pkval)
        OrmError.__init__(exc, msg)
        exc.entity = entity
        exc.pkval = pkval
class MultipleObjectsFoundError(OrmError): pass  # Entity.get(): query matched several objects
class TooManyObjectsFoundError(OrmError): pass  # more objects than the declared limit
class OperationWithDeletedObjectError(OrmError): pass  # attribute access on a deleted object
class TransactionError(OrmError): pass  # base class for transaction-level failures
class ConnectionClosedError(TransactionError): pass  # operation attempted on a closed connection
class TransactionIntegrityError(TransactionError):
    # Wraps a database IntegrityError raised during flush/commit.
    def __init__(exc, msg, original_exc=None):
        Exception.__init__(exc, msg)
        exc.original_exc = original_exc  # the underlying DBAPI exception, if any
class CommitException(TransactionError):
    # Raised when the primary database commit failed; `exceptions` holds the
    # collected sys.exc_info() triples from the commit and subsequent rollbacks.
    def __init__(exc, msg, exceptions):
        Exception.__init__(exc, msg)
        exc.exceptions = exceptions
class PartialCommitException(TransactionError):
    # Raised when the primary commit succeeded but a secondary database failed to commit.
    def __init__(exc, msg, exceptions):
        Exception.__init__(exc, msg)
        exc.exceptions = exceptions
class RollbackException(TransactionError):
    # Raised when one or more caches failed during rollback; see `exceptions`.
    def __init__(exc, msg, exceptions):
        Exception.__init__(exc, msg)
        exc.exceptions = exceptions
class DatabaseSessionIsOver(TransactionError): pass  # object used after its db_session ended
TransactionRolledBack = DatabaseSessionIsOver  # backward-compatible alias
class IsolationError(TransactionError): pass  # base for isolation-level violations
class UnrepeatableReadError(IsolationError): pass  # row changed in db between reads
class OptimisticCheckError(IsolationError): pass  # optimistic concurrency check failed
class UnresolvableCyclicDependency(TransactionError): pass  # cannot order INSERTs/UPDATEs
class UnexpectedError(TransactionError):
    # Wraps an unexpected internal exception raised during commit/flush.
    def __init__(exc, msg, original_exc):
        Exception.__init__(exc, msg)
        exc.original_exc = original_exc
class ExprEvalError(TranslationError):
    """Raised when evaluation of an external expression inside a query fails.

    Wraps the original exception (kept as `cause`) with the source text of
    the failing expression.
    """
    def __init__(exc, src, cause):
        assert isinstance(cause, Exception)
        message = '`%s` raises %s: %s' % (src, type(cause).__name__, str(cause))
        TranslationError.__init__(exc, message)
        exc.cause = cause
class PonyInternalException(Exception):
    # Base for exceptions used as internal control flow; never surfaces to user code.
    pass
class OptimizationFailed(PonyInternalException):
    pass  # Internal exception, cannot be encountered in user code
class UseAnotherTranslator(PonyInternalException):
    # Internal signal that query translation must be redone with another translator.
    def __init__(self, translator):
        Exception.__init__(self, 'This exception should be caught internally by PonyORM')
        self.translator = translator  # the translator to be used instead
class PonyRuntimeWarning(RuntimeWarning):
    # Base warning category for Pony ORM runtime warnings.
    pass
class DatabaseContainsIncorrectValue(PonyRuntimeWarning):
    # Emitted when a value loaded from the database violates attribute constraints.
    pass
class DatabaseContainsIncorrectEmptyValue(DatabaseContainsIncorrectValue):
    # Emitted when a required attribute is empty/NULL in the database.
    pass
def adapt_sql(sql, paramstyle):
    """Convert raw SQL with $-placeholders into DBAPI paramstyle form.

    Returns (adapted_sql, code) where `code` is a compiled expression that,
    when eval'ed in the caller's namespace, yields the argument tuple (for
    positional paramstyles) or dict (for named paramstyles). '$$' escapes a
    literal '$'. Results are memoized in adapted_sql_cache per (sql, paramstyle).
    """
    result = adapted_sql_cache.get((sql, paramstyle))
    if result is not None: return result
    pos = 0
    result = []
    args = []     # positional parameter expressions (qmark/format/numeric)
    kwargs = {}   # named parameter expressions (named/pyformat)
    original_sql = sql
    # % is the formatting character for these paramstyles, so escape it first
    if paramstyle in ('format', 'pyformat'): sql = sql.replace('%', '%%')
    while True:
        try: i = sql.index('$', pos)
        except ValueError:
            result.append(sql[pos:])
            break
        result.append(sql[pos:i])
        if sql[i+1] == '$':  # '$$' -> literal '$'
            result.append('$')
            pos = i+2
        else:
            # Parse the Python expression that follows '$'
            try: expr, _ = parse_expr(sql, i+1)
            except ValueError:
                raise # TODO
            pos = i+1 + len(expr)
            if expr.endswith(';'): expr = expr[:-1]
            compile(expr, '<?>', 'eval')  # expr correction check
            if paramstyle == 'qmark':
                args.append(expr)
                result.append('?')
            elif paramstyle == 'format':
                args.append(expr)
                result.append('%s')
            elif paramstyle == 'numeric':
                args.append(expr)
                result.append(':%d' % len(args))
            elif paramstyle == 'named':
                key = 'p%d' % (len(kwargs) + 1)
                kwargs[key] = expr
                result.append(':' + key)
            elif paramstyle == 'pyformat':
                key = 'p%d' % (len(kwargs) + 1)
                kwargs[key] = expr
                result.append('%%(%s)s' % key)
            else: throw(NotImplementedError)
    if args or kwargs:
        adapted_sql = ''.join(result)
        # Build the source of an expression producing the arguments container
        if args: source = '(%s,)' % ', '.join(args)
        else: source = '{%s}' % ','.join('%r:%s' % item for item in kwargs.items())
        code = compile(source, '<?>', 'eval')
    else:
        # No parameters at all: keep the original text (un-escaping '$$')
        adapted_sql = original_sql.replace('$$', '$')
        code = compile('None', '<?>', 'eval')
    result = adapted_sql, code
    adapted_sql_cache[(sql, paramstyle)] = result
    return result
class PrefetchContext(object):
    """Per-query prefetch configuration: which entities and which attributes
    should be loaded eagerly. Used as a context manager pushing itself onto
    the thread-local prefetch context stack."""
    def __init__(self, database=None):
        self.database = database
        self.attrs_to_prefetch_dict = defaultdict(set)  # entity -> set/frozenset of attrs
        self.entities_to_prefetch = set()
        self.relations_to_prefetch_cache = {}  # entity -> tuple of relation attrs
    def copy(self):
        # Shallow copy; the relation cache is deliberately not carried over.
        duplicate = PrefetchContext(self.database)
        duplicate.attrs_to_prefetch_dict = self.attrs_to_prefetch_dict.copy()
        duplicate.entities_to_prefetch = self.entities_to_prefetch.copy()
        return duplicate
    def __enter__(self):
        local.prefetch_context_stack.append(self)
    def __exit__(self, exc_type, exc_val, exc_tb):
        stack = local.prefetch_context_stack
        assert stack and stack[-1] is self
        stack.pop()
    def get_frozen_attrs_to_prefetch(self, entity):
        """Return the (hashable) frozenset of attrs to prefetch for entity,
        freezing the mutable set in place on first access."""
        attrs = self.attrs_to_prefetch_dict.get(entity, ())
        if type(attrs) is set:
            attrs = frozenset(attrs)
            self.attrs_to_prefetch_dict[entity] = attrs
        return attrs
    def get_relations_to_prefetch(self, entity):
        """Return (and cache) the tuple of relation attributes of entity that
        should be prefetched according to this context."""
        cached = self.relations_to_prefetch_cache.get(entity)
        if cached is None:
            attrs_to_prefetch = self.attrs_to_prefetch_dict[entity]
            cached = tuple(
                attr for attr in entity._attrs_
                if attr.is_relation and (
                    attr in attrs_to_prefetch
                    or attr.py_type in self.entities_to_prefetch and not attr.is_collection))
            self.relations_to_prefetch_cache[entity] = cached
        return cached
class Local(localbase):
    """Thread-local ORM state: debug flags, per-database session caches,
    db_session nesting, prefetch stack and permission caches."""
    def __init__(local):
        local.debug = False             # SQL logging on/off for this thread
        local.show_values = None        # log parameter values? None = provider default
        local.debug_stack = []          # saved (debug, show_values) pairs
        local.db2cache = {}             # Database -> SessionCache for the active session
        local.db_context_counter = 0    # db_session nesting depth
        local.db_session = None         # innermost active DBSessionContextManager
        local.prefetch_context_stack = []
        local.current_user = None
        local.perms_context = None
        local.user_groups_cache = {}
        local.user_roles_cache = defaultdict(dict)
    @property
    def prefetch_context(local):
        # Innermost active PrefetchContext, or None when not prefetching.
        if local.prefetch_context_stack:
            return local.prefetch_context_stack[-1]
        return None
    def push_debug_state(local, debug, show_values):
        # Save current debug flags and (unless suppressed) install new ones.
        local.debug_stack.append((local.debug, local.show_values))
        if not suppress_debug_change:
            local.debug = debug
            local.show_values = show_values
    def pop_debug_state(local):
        # Restore flags saved by the matching push_debug_state() call.
        local.debug, local.show_values = local.debug_stack.pop()

# The single thread-local state instance used throughout the module.
local = Local()
def _get_caches():
    """Return the active session caches ordered by descending (database
    priority, cache number), i.e. the primary database's cache first.

    Simplified: sorted() already returns a list, so the redundant list()
    wrapper and the intermediate generator expression were removed.
    """
    return sorted(local.db2cache.values(), reverse=True,
                  key=lambda cache: (cache.database.priority, cache.num))
@cut_traceback
def flush():
    """Flush pending changes of every active session cache to the database
    (without committing)."""
    for cache in _get_caches():
        cache.flush()
def transact_reraise(exc_class, exceptions):
    """Wrap the first of the collected sys.exc_info() triples into `exc_class`
    (keeping all triples on the new exception) and re-raise it with the
    original traceback.

    The local references are deleted in `finally` to break the traceback
    reference cycle and let the frames be collected promptly.
    """
    cls, exc, tb = exceptions[0]
    new_exc = None
    try:
        msg = " ".join(tostring(arg) for arg in exc.args)
        if not issubclass(cls, TransactionError): msg = '%s: %s' % (cls.__name__, msg)
        new_exc = exc_class(msg, exceptions)
        new_exc.__cause__ = None
        reraise(exc_class, new_exc, tb)
    finally: del exceptions, exc, tb, new_exc

def rollback_and_reraise(exc_info):
    """Roll back all active sessions, then re-raise the given exc_info triple
    (even if the rollback itself raised)."""
    try:
        rollback()
    finally:
        reraise(*exc_info)
@cut_traceback
def commit():
    """Commit the current transaction in every active database session.

    All caches are flushed first; a flush failure rolls everything back.
    Then the highest-priority ("primary") cache is committed first: if that
    fails, the remaining caches are rolled back and a CommitException is
    raised; if it succeeds but a secondary commit fails, a
    PartialCommitException is raised instead.
    """
    caches = _get_caches()
    if not caches: return
    try:
        for cache in caches:
            cache.flush()
    except:
        rollback_and_reraise(sys.exc_info())
    primary_cache = caches[0]
    other_caches = caches[1:]
    exceptions = []
    try:
        try:
            primary_cache.commit()
        except:
            exceptions.append(sys.exc_info())
            for cache in other_caches:
                try: cache.rollback()
                except: exceptions.append(sys.exc_info())
            transact_reraise(CommitException, exceptions)
        else:
            for cache in other_caches:
                try: cache.commit()
                except: exceptions.append(sys.exc_info())
            if exceptions:
                transact_reraise(PartialCommitException, exceptions)
    finally:
        # break traceback reference cycles
        del exceptions
@cut_traceback
def rollback():
    """Roll back the current transaction in every active database session.

    All rollback errors are collected and re-raised together as a
    RollbackException; on success every cache must have unregistered itself
    from local.db2cache.
    """
    exceptions = []
    try:
        for cache in _get_caches():
            try: cache.rollback()
            except: exceptions.append(sys.exc_info())
        if exceptions:
            transact_reraise(RollbackException, exceptions)
        assert not local.db2cache
    finally:
        # break traceback reference cycles
        del exceptions

# Detects whether a raw SQL string already starts with SELECT (case-insensitive).
select_re = re.compile(r'\s*select\b', re.IGNORECASE)
class DBSessionContextManager(object):
    """Implementation of db_session: usable as a decorator, a context manager,
    or called with options (retry, ddl, serializable, ...) to produce a
    configured copy. Manages session nesting, commit/rollback on exit, retry
    logic, and temporary SQL-debug state.
    """
    __slots__ = 'retry', 'retry_exceptions', 'allowed_exceptions', \
                'immediate', 'ddl', 'serializable', 'strict', 'optimistic', \
                'sql_debug', 'show_values'
    def __init__(db_session, retry=0, immediate=False, ddl=False, serializable=False, strict=False, optimistic=True,
                 retry_exceptions=(TransactionError,), allowed_exceptions=(), sql_debug=None, show_values=None):
        if retry != 0:
            if type(retry) is not int: throw(TypeError,
                "'retry' parameter of db_session must be of integer type. Got: %s" % type(retry))
            if retry < 0: throw(TypeError,
                "'retry' parameter of db_session must not be negative. Got: %d" % retry)
            if ddl: throw(TypeError, "'ddl' and 'retry' parameters of db_session cannot be used together")
        if not callable(allowed_exceptions) and not callable(retry_exceptions):
            # An exception class cannot simultaneously allow a commit and trigger a retry
            for e in allowed_exceptions:
                if e in retry_exceptions: throw(TypeError,
                    'The same exception %s cannot be specified in both '
                    'allowed and retry exception lists simultaneously' % e.__name__)
        db_session.retry = retry
        db_session.ddl = ddl
        db_session.serializable = serializable
        # ddl, serializable and pessimistic sessions must start a db transaction immediately
        db_session.immediate = immediate or ddl or serializable or not optimistic
        db_session.strict = strict
        db_session.optimistic = optimistic and not serializable
        db_session.retry_exceptions = retry_exceptions
        db_session.allowed_exceptions = allowed_exceptions
        db_session.sql_debug = sql_debug
        db_session.show_values = show_values
    def __call__(db_session, *args, **kwargs):
        # db_session() -> self; db_session(**options) -> configured copy;
        # db_session(func) -> decorated function (generators/coroutines handled specially)
        if not args and not kwargs: return db_session
        if len(args) > 1: throw(TypeError,
            'Pass only keyword arguments to db_session or use db_session as decorator')
        if not args: return db_session.__class__(**kwargs)
        if kwargs: throw(TypeError,
            'Pass only keyword arguments to db_session or use db_session as decorator')
        func = args[0]
        if isgeneratorfunction(func) or hasattr(inspect, 'iscoroutinefunction') and inspect.iscoroutinefunction(func):
            return db_session._wrap_coroutine_or_generator_function(func)
        return db_session._wrap_function(func)
    def __enter__(db_session):
        if db_session.retry != 0: throw(TypeError,
            "@db_session can accept 'retry' parameter only when used as decorator and not as context manager")
        db_session._enter()
    def _enter(db_session):
        # Register (or nest into) the thread-local session; ddl/serializable
        # sessions cannot be opened inside a weaker outer session.
        if local.db_session is None:
            assert not local.db_context_counter
            local.db_session = db_session
        elif db_session.ddl and not local.db_session.ddl: throw(TransactionError,
            'Cannot start ddl transaction inside non-ddl transaction')
        elif db_session.serializable and not local.db_session.serializable: throw(TransactionError,
            'Cannot start serializable transaction inside non-serializable transaction')
        local.db_context_counter += 1
        if db_session.sql_debug is not None:
            local.push_debug_state(db_session.sql_debug, db_session.show_values)
    def __exit__(db_session, exc_type=None, exc=None, tb=None):
        local.db_context_counter -= 1
        try:
            # Only the outermost session actually commits or rolls back
            if not local.db_context_counter:
                assert local.db_session is db_session
                db_session._commit_or_rollback(exc_type, exc, tb)
        finally:
            if db_session.sql_debug is not None:
                local.pop_debug_state()
    def _commit_or_rollback(db_session, exc_type, exc, tb):
        try:
            if exc_type is None: can_commit = True
            elif not callable(db_session.allowed_exceptions):
                can_commit = issubclass(exc_type, tuple(db_session.allowed_exceptions))
            else:
                assert exc is not None  # exc can be None in Python 2.6 even if exc_type is not None
                try: can_commit = db_session.allowed_exceptions(exc)
                except: rollback_and_reraise(sys.exc_info())
            if can_commit:
                commit()
                for cache in _get_caches(): cache.release()
                assert not local.db2cache
            else:
                try: rollback()
                except:
                    if exc_type is None: raise  # if exc_type is not None it will be reraised outside of __exit__
        finally:
            # break traceback reference cycle, then clear session-scoped state
            del exc, tb
            local.db_session = None
            local.user_groups_cache.clear()
            local.user_roles_cache.clear()
    def _wrap_function(db_session, func):
        def new_func(func, *args, **kwargs):
            if local.db_context_counter:
                # Already inside a session: ddl is forbidden, retry is ignored
                if db_session.ddl:
                    fname = func.__name__ + '()' if isinstance(func, types.FunctionType) else func
                    throw(TransactionError, '@db_session-decorated %s function with `ddl` option '
                                            'cannot be called inside of another db_session' % fname)
                if db_session.retry:
                    fname = func.__name__ + '()' if isinstance(func, types.FunctionType) else func
                    message = '@db_session decorator with `retry=%d` option is ignored for %s function ' \
                              'because it is called inside another db_session' % (db_session.retry, fname)
                    warnings.warn(message, PonyRuntimeWarning, stacklevel=3)
                if db_session.sql_debug is None:
                    return func(*args, **kwargs)
                local.push_debug_state(db_session.sql_debug, db_session.show_values)
                try:
                    return func(*args, **kwargs)
                finally:
                    local.pop_debug_state()
            exc = tb = None
            try:
                # Outermost session: run with up to `retry` re-attempts
                for i in range(db_session.retry+1):
                    db_session._enter()
                    exc_type = exc = tb = None
                    try:
                        result = func(*args, **kwargs)
                        commit()
                        return result
                    except:
                        exc_type, exc, tb = sys.exc_info()
                        if getattr(exc, 'should_retry', False):
                            do_retry = True
                        else:
                            retry_exceptions = db_session.retry_exceptions
                            if not callable(retry_exceptions):
                                do_retry = issubclass(exc_type, tuple(retry_exceptions))
                            else:
                                assert exc is not None  # exc can be None in Python 2.6
                                do_retry = retry_exceptions(exc)
                        if not do_retry:
                            raise
                        rollback()
                    finally:
                        db_session.__exit__(exc_type, exc, tb)
                # all retries exhausted: re-raise the last exception
                reraise(exc_type, exc, tb)
            finally:
                del exc, tb
        return decorator(new_func, func)
    def _wrap_coroutine_or_generator_function(db_session, gen_func):
        # Generators/coroutines suspend and resume, so the session state must be
        # saved on every yield and restored on every resume; ddl/retry/serializable
        # options are incompatible with that.
        for option in ('ddl', 'retry', 'serializable'):
            if getattr(db_session, option, None): throw(TypeError,
                "db_session with `%s` option cannot be applied to generator function" % option)
        def interact(iterator, input=None, exc_info=None):
            # Advance the wrapped iterator: send a value, or propagate an exception into it
            if exc_info is None:
                return next(iterator) if input is None else iterator.send(input)
            if exc_info[0] is GeneratorExit:
                close = getattr(iterator, 'close', None)
                if close is not None: close()
                reraise(*exc_info)
            throw_ = getattr(iterator, 'throw', None)
            if throw_ is None: reraise(*exc_info)
            return throw_(*exc_info)
        @wraps(gen_func)
        def new_gen_func(*args, **kwargs):
            db2cache_copy = {}  # session caches preserved across suspensions
            def wrapped_interact(iterator, input=None, exc_info=None):
                if local.db_session is not None: throw(TransactionError,
                    '@db_session-wrapped generator cannot be used inside another db_session')
                assert not local.db_context_counter and not local.db2cache
                # Restore this generator's session state before stepping it
                local.db_context_counter = 1
                local.db_session = db_session
                local.db2cache.update(db2cache_copy)
                db2cache_copy.clear()
                if db_session.sql_debug is not None:
                    local.push_debug_state(db_session.sql_debug, db_session.show_values)
                try:
                    try:
                        output = interact(iterator, input, exc_info)
                    except StopIteration as e:
                        # Generator finished normally: commit and release everything
                        commit()
                        for cache in _get_caches():
                            cache.release()
                        assert not local.db2cache
                        raise e
                    # Suspending with uncommitted changes would lose transactional control
                    for cache in _get_caches():
                        if cache.modified or cache.in_transaction: throw(TransactionError,
                            'You need to manually commit() changes before suspending the generator')
                except:
                    rollback_and_reraise(sys.exc_info())
                else:
                    return output
                finally:
                    # Stash the session state away until the next resume
                    if db_session.sql_debug is not None:
                        local.pop_debug_state()
                    db2cache_copy.update(local.db2cache)
                    local.db2cache.clear()
                    local.db_context_counter = 0
                    local.db_session = None
            gen = gen_func(*args, **kwargs)
            iterator = gen.__await__() if hasattr(gen, '__await__') else iter(gen)
            try:
                output = wrapped_interact(iterator)
                while True:
                    try:
                        input = yield output
                    except:
                        output = wrapped_interact(iterator, exc_info=sys.exc_info())
                    else:
                        output = wrapped_interact(iterator, input)
            except StopIteration:
                assert not db2cache_copy and not local.db2cache
                return
        if hasattr(types, 'coroutine'):
            # allow the wrapper itself to be awaited when wrapping a coroutine
            new_gen_func = types.coroutine(new_gen_func)
        return new_gen_func

# The module-level db_session object exported to users.
db_session = DBSessionContextManager()
class SQLDebuggingContextManager(object):
    """Implementation of sql_debugging: temporarily enables/disables SQL
    logging. Usable as a context manager, a decorator, or called with options
    to produce a configured copy; generator functions get the debug state
    pushed/popped around every resume/suspend.
    """
    def __init__(self, debug=True, show_values=None):
        self.debug = debug
        self.show_values = show_values
    def __call__(self, *args, **kwargs):
        # sql_debugging(func) -> decorated func; sql_debugging(**opts) -> configured copy
        if not kwargs and len(args) == 1 and callable(args[0]):
            arg = args[0]
            if not isgeneratorfunction(arg):
                return self._wrap_function(arg)
            return self._wrap_generator_function(arg)
        return self.__class__(*args, **kwargs)
    def __enter__(self):
        local.push_debug_state(self.debug, self.show_values)
    def __exit__(self, exc_type=None, exc=None, tb=None):
        local.pop_debug_state()
    def _wrap_function(self, func):
        def new_func(func, *args, **kwargs):
            self.__enter__()
            try:
                return func(*args, **kwargs)
            finally:
                self.__exit__()
        return decorator(new_func, func)
    def _wrap_generator_function(self, gen_func):
        def interact(iterator, input=None, exc_info=None):
            # Advance the wrapped iterator: send a value, or propagate an exception into it
            if exc_info is None:
                return next(iterator) if input is None else iterator.send(input)
            if exc_info[0] is GeneratorExit:
                close = getattr(iterator, 'close', None)
                if close is not None: close()
                reraise(*exc_info)
            throw_ = getattr(iterator, 'throw', None)
            if throw_ is None: reraise(*exc_info)
            return throw_(*exc_info)
        def new_gen_func(gen_func, *args, **kwargs):
            def wrapped_interact(iterator, input=None, exc_info=None):
                # Debug state is active only while the generator body runs
                self.__enter__()
                try:
                    return interact(iterator, input, exc_info)
                finally:
                    self.__exit__()
            gen = gen_func(*args, **kwargs)
            iterator = iter(gen)
            output = wrapped_interact(iterator)
            try:
                while True:
                    try:
                        input = yield output
                    except:
                        output = wrapped_interact(iterator, exc_info=sys.exc_info())
                    else:
                        output = wrapped_interact(iterator, input)
            except StopIteration:
                return
        return decorator(new_gen_func, gen_func)

# The module-level sql_debugging object exported to users.
sql_debugging = SQLDebuggingContextManager()
def throw_db_session_is_over(action, obj, attr=None):
    # Raise the standard error for using an object/attribute after its session ended.
    msg = 'Cannot %s %s%s: the database session is over'
    throw(DatabaseSessionIsOver, msg % (action, safe_repr(obj), '.%s' % attr.name if attr else ''))

def with_transaction(*args, **kwargs):
    # Deprecated alias for db_session, kept for backward compatibility.
    deprecated(3, "@with_transaction decorator is deprecated, use @db_session decorator instead")
    return db_session(*args, **kwargs)
@decorator
def db_decorator(func, *args, **kwargs):
    """Run func inside a db_session; when pony.web is loaded, let HttpRedirect
    pass through as an allowed exception and translate ORM "not found" errors
    into an HTTP 404 response."""
    web = sys.modules.get('pony.web')
    allowed_exceptions = [web.HttpRedirect] if web else []
    try:
        with db_session(allowed_exceptions=allowed_exceptions):
            return func(*args, **kwargs)
    except (ObjectNotFound, RowNotFound):
        if web is not None:
            throw(web.Http404NotFound)
        raise
# Provider names accepted by the on_connect(provider=...) option.
known_providers = ('sqlite', 'postgres', 'mysql', 'oracle')

class OnConnectDecorator(object):
    """Decorator object behind Database.on_connect.

    Supports both `@db.on_connect` (registers the function for any provider)
    and `@db.on_connect(provider='sqlite')` / `db.on_connect('sqlite')`
    (registers only for the named provider).
    """
    @staticmethod
    def check_provider(provider):
        # Validate an optional provider name: if given, it must be one of the known strings.
        if provider:
            if not isinstance(provider, str):
                throw(TypeError, "'provider' option should be type of 'string', got %r" % type(provider).__name__)
            if provider not in known_providers:
                throw(BindingError, 'Unknown provider %s' % provider)
    def __init__(self, database, provider):
        OnConnectDecorator.check_provider(provider)
        self.provider = provider
        self.database = database
    def __call__(self, func=None, provider=None):
        if isinstance(func, types.FunctionType):
            # Register the callback for the explicitly given provider, or this decorator's one
            self.database._on_connect_funcs.append((func, provider or self.provider))
        # BUGFIX: the original tested `func is str`, comparing the argument with the
        # str *type* itself, so a provider name passed positionally (on_connect('sqlite'))
        # was silently dropped; check the value's type instead.
        if not provider and isinstance(func, str):
            provider = func
        OnConnectDecorator.check_provider(provider)
        return OnConnectDecorator(self.database, provider)
db_id_counter = itertools.count(1)
class Database(object):
    def __deepcopy__(self, memo):
        return self  # Database cannot be cloned by deepcopy()
    @cut_traceback
    def __init__(self, *args, **kwargs):
        """Create a Database; if any arguments are given they are forwarded to
        bind() immediately (e.g. Database('sqlite', ':memory:'))."""
        self.id = next(db_id_counter)
        # argument 'self' cannot be named 'database', because 'database' can be in kwargs
        self.priority = 0
        self._insert_cache = {}

        # ER-diagram related stuff:
        self._translator_cache = {}
        self._constructed_sql_cache = {}
        self.entities = {}           # entity name -> entity class
        self.schema = None
        # Per-database Entity base class created through the EntityMeta metaclass
        self.Entity = type.__new__(EntityMeta, 'Entity', (Entity,), {})
        self.Entity._database_ = self

        # Statistics-related stuff:
        self._global_stats = {}      # sql -> QueryStat, merged across threads
        self._global_stats_lock = RLock()
        self._dblocal = DbLocal()    # thread-local stats and last_sql

        self.on_connect = OnConnectDecorator(self, None)
        self._on_connect_funcs = []  # list of (func, provider_name_or_None)

        self.provider = self.provider_name = None
        if args or kwargs: self._bind(*args, **kwargs)
def call_on_connect(database, con):
for func, provider in database._on_connect_funcs:
if not provider or provider == database.provider_name:
func(database, con)
con.commit()
    @cut_traceback
    def bind(self, *args, **kwargs):
        """Bind this Database to a provider; see _bind() for accepted forms."""
        self._bind(*args, **kwargs)
    def _bind(self, *args, **kwargs):
        # argument 'self' cannot be named 'database', because 'database' can be in kwargs
        if self.provider is not None:
            throw(BindingError, 'Database object was already bound to %s provider' % self.provider.dialect)
        # bind(mapping) form: a single dict-like argument supplies all options
        if len(args) == 1 and not kwargs and hasattr(args[0], 'keys'):
            args, kwargs = (), args[0]
        provider = None
        # Provider may come first positionally or as the 'provider' keyword
        if args: provider, args = args[0], args[1:]
        elif 'provider' not in kwargs: throw(TypeError, 'Database provider is not specified')
        else: provider = kwargs.pop('provider')
        if isinstance(provider, type) and issubclass(provider, DBAPIProvider):
            provider_cls = provider
        else:
            if not isinstance(provider, str):
                throw(TypeError, 'Provider name should be string. Got: %r' % type(provider).__name__)
            if provider == 'pygresql': throw(TypeError,
                'Pony no longer supports PyGreSQL module. Please use psycopg2 instead.')
            self.provider_name = provider
            # Load the provider module lazily, e.g. pony.orm.dbproviders.sqlite
            provider_module = import_module('pony.orm.dbproviders.' + provider)
            provider_cls = provider_module.provider_cls
        kwargs['pony_call_on_connect'] = self.call_on_connect
        self.provider = provider_cls(self, *args, **kwargs)
    @property
    def last_sql(database):
        # Text of the last SQL statement executed in the current thread.
        return database._dblocal.last_sql
    @property
    def local_stats(database):
        # Per-thread dict of QueryStat objects keyed by sql (None = aggregate).
        return database._dblocal.stats
def _update_local_stat(database, sql, query_start_time):
dblocal = database._dblocal
dblocal.last_sql = sql
stats = dblocal.stats
query_end_time = time()
duration = query_end_time - query_start_time
stat = stats.get(sql)
if stat is not None:
stat.query_executed(duration)
else:
stats[sql] = QueryStat(sql, duration)
total_stat = stats.get(None)
if total_stat is not None:
total_stat.query_executed(duration)
else:
stats[None] = QueryStat(None, duration)
    def merge_local_stats(database):
        """Fold this thread's statistics into the global per-database stats
        (under the lock) and reset the thread-local stats."""
        setdefault = database._global_stats.setdefault
        with database._global_stats_lock:
            for sql, stat in database._dblocal.stats.items():
                global_stat = setdefault(sql, stat)
                if global_stat is not stat: global_stat.merge(stat)
        # fresh thread-local stats: only the aggregate (None) entry remains
        database._dblocal.stats = {None: QueryStat(None)}
    @property
    def global_stats(database):
        # Thread-safe snapshot (copies) of the merged global statistics.
        with database._global_stats_lock:
            return {sql: stat.copy() for sql, stat in database._global_stats.items()}
    @property
    def global_stats_lock(database):
        # Deprecated: global_stats now snapshots under the lock internally.
        deprecated(3, "global_stats_lock is deprecated, just use global_stats property without any locking")
        return database._global_stats_lock
    @cut_traceback
    def get_connection(database):
        """Return the raw DBAPI connection of the current session, starting an
        immediate transaction on it first if none is active yet."""
        cache = database._get_cache()
        if not cache.in_transaction:
            # mark connection usage as transactional so Pony does not reuse it optimistically
            cache.immediate = True
            cache.prepare_connection_for_query_execution()
            cache.in_transaction = True
        connection = cache.connection
        assert connection is not None
        return connection
    @cut_traceback
    def disconnect(database):
        """Close this thread's pooled connection to the database. Must be
        called outside of any db_session; rolls back a live cache first."""
        provider = database.provider
        if provider is None: return  # never bound — nothing to close
        if local.db_context_counter: throw(TransactionError, 'disconnect() cannot be called inside of db_session')
        cache = local.db2cache.get(database)
        if cache is not None: cache.rollback()
        provider.disconnect()
    def _get_cache(database):
        """Return (creating on first use) the SessionCache for this database in
        the current thread; requires an active db_session except in the
        interactive interpreter's main thread."""
        if database.provider is None: throw(MappingError, 'Database object is not bound with a provider yet')
        cache = local.db2cache.get(database)
        if cache is not None: return cache
        if not local.db_context_counter and not (
                pony.MODE == 'INTERACTIVE' and current_thread().__class__ is _MainThread
            ): throw(TransactionError, 'db_session is required when working with the database')
        cache = local.db2cache[database] = SessionCache(database)
        return cache
    @cut_traceback
    def flush(database):
        # Push pending changes of this database's session to the DB (no commit).
        database._get_cache().flush()
    @cut_traceback
    def commit(database):
        # Flush and commit this database's session, if one is active.
        cache = local.db2cache.get(database)
        if cache is not None:
            cache.flush_and_commit()
    @cut_traceback
    def rollback(database):
        # Roll back this database's session, wrapping failures in RollbackException.
        cache = local.db2cache.get(database)
        if cache is not None:
            try: cache.rollback()
            except: transact_reraise(RollbackException, [sys.exc_info()])
@cut_traceback
def execute(database, sql, globals=None, locals=None):
return database._exec_raw_sql(sql, globals, locals, frame_depth=cut_traceback_depth+1, start_transaction=True)
    def _exec_raw_sql(database, sql, globals, locals, frame_depth, start_transaction=False):
        # Adapt parameter placeholders in `sql` to the provider's paramstyle,
        # evaluate the extracted parameter expressions in the caller's
        # namespaces, and execute the resulting query.
        # `frame_depth` must point at the original caller's frame so that its
        # globals/locals can be captured for parameter evaluation.
        provider = database.provider
        if provider is None: throw(MappingError, 'Database object is not bound with a provider yet')
        sql = sql[:]  # sql = templating.plainstr(sql)
        if globals is None:
            assert locals is None
            # +1 accounts for this function's own frame on the stack
            frame_depth += 1
            globals = sys._getframe(frame_depth).f_globals
            locals = sys._getframe(frame_depth).f_locals
        adapted_sql, code = adapt_sql(sql, provider.paramstyle)
        # NOTE(review): eval() runs the parameter expressions that adapt_sql
        # compiled out of the SQL text; the SQL string is expected to be
        # trusted application code, not end-user input.
        arguments = eval(code, globals, locals)
        return database._exec_sql(adapted_sql, arguments, False, start_transaction)
@cut_traceback
def select(database, sql, globals=None, locals=None, frame_depth=0):
if not select_re.match(sql): sql = 'select ' + sql
cursor = database._exec_raw_sql(sql, globals, locals, frame_depth+cut_traceback_depth+1)
max_fetch_count = options.MAX_FETCH_COUNT
if max_fetch_count is not None:
result = cursor.fetchmany(max_fetch_count)
if cursor.fetchone() is not None: throw(TooManyRowsFound)
else: result = cursor.fetchall()
if len(cursor.description) == 1: return [ row[0] for row in result ]
row_class = type("row", (tuple,), {})
for i, column_info in enumerate(cursor.description):
column_name = column_info[0]
if not is_ident(column_name): continue
if hasattr(tuple, column_name) and column_name.startswith('__'): continue
setattr(row_class, column_name, property(itemgetter(i)))
return [ row_class(row) for row in result ]
@cut_traceback
def get(database, sql, globals=None, locals=None):
rows = database.select(sql, globals, locals, frame_depth=cut_traceback_depth+1)
if not rows: throw(RowNotFound)
if len(rows) > 1: throw(MultipleRowsFound)
row = rows[0]
return row
@cut_traceback
def exists(database, sql, globals=None, locals=None):
if not select_re.match(sql): sql = 'select ' + sql
cursor = database._exec_raw_sql(sql, globals, locals, frame_depth=cut_traceback_depth+1)
result = cursor.fetchone()
return bool(result)
@cut_traceback
def insert(database, table_name, returning=None, **kwargs):
table_name = database._get_table_name(table_name)
if database.provider is None: throw(MappingError, 'Database object is not bound with a provider yet')
query_key = (table_name,) + tuple(kwargs) # keys are not sorted deliberately!!
if returning is not None: query_key = query_key + (returning,)
cached_sql = database._insert_cache.get(query_key)
if cached_sql is None:
ast = [ 'INSERT', table_name, kwargs.keys(),
[ [ 'PARAM', (i, None, None) ] for i in range(len(kwargs)) ], returning ]
sql, adapter = database._ast2sql(ast)
cached_sql = sql, adapter
database._insert_cache[query_key] = cached_sql
else: sql, adapter = cached_sql
arguments = adapter(list(kwargs.values())) # order of values same as order of keys
if returning is not None:
return database._exec_sql(sql, arguments, returning_id=True, start_transaction=True)
cursor = database._exec_sql(sql, arguments, start_transaction=True)
return getattr(cursor, 'lastrowid', None)
def _ast2sql(database, sql_ast):
sql, adapter = database.provider.ast2sql(sql_ast)
return sql, adapter
    def _exec_sql(database, sql, arguments=None, returning_id=False, start_transaction=False):
        # Execute already-adapted SQL with prepared arguments through the
        # current session cache.  On failure, the connection is
        # re-established once via cache.reconnect() and the statement is
        # retried on the fresh connection.
        # NOTE(review): presumably cache.reconnect(e) re-raises when `e` is
        # not a connection-level error - confirm against SessionCache.
        cache = database._get_cache()
        if start_transaction: cache.immediate = True
        connection = cache.prepare_connection_for_query_execution()
        cursor = connection.cursor()
        if local.debug: log_sql(sql, arguments)
        provider = database.provider
        t = time()  # start timestamp for per-query statistics
        try: new_id = provider.execute(cursor, sql, arguments, returning_id)
        except Exception as e:
            connection = cache.reconnect(e)
            cursor = connection.cursor()
            if local.debug: log_sql(sql, arguments)
            t = time()  # restart the clock so only the successful retry is measured
            new_id = provider.execute(cursor, sql, arguments, returning_id)
        if cache.immediate:
            cache.in_transaction = True
        database._update_local_stat(sql, t)
        if not returning_id: return cursor
        return new_id
@cut_traceback
def generate_mapping(database, filename=None, check_tables=True, create_tables=False):
provider = database.provider
if provider is None: throw(MappingError, 'Database object is not bound with a provider yet')
if database.schema: throw(BindingError, 'Mapping was already generated')
if filename is not None: throw(NotImplementedError)
schema = database.schema = provider.dbschema_cls(provider)
entities = list(sorted(database.entities.values(), key=attrgetter('_id_')))
for entity in entities:
entity._resolve_attr_types_()
for entity in entities:
entity._link_reverse_attrs_()
for entity in entities:
entity._check_table_options_()
def get_columns(table, column_names):
column_dict = table.column_dict
return tuple(column_dict[name] for name in column_names)
for entity in entities:
entity._get_pk_columns_()
table_name = entity._table_
is_subclass = entity._root_ is not entity
if is_subclass:
if table_name is not None: throw(NotImplementedError,
'Cannot specify table name for entity %r which is subclass of %r' % (entity.__name__, entity._root_.__name__))
table_name = entity._root_._table_
entity._table_ = table_name
elif table_name is None:
table_name = provider.get_default_entity_table_name(entity)
entity._table_ = table_name
else: assert isinstance(table_name, (str, tuple))
table = schema.tables.get(table_name)
if table is None: table = schema.add_table(table_name, entity)
else: table.add_entity(entity)
for attr in entity._new_attrs_:
if attr.is_collection:
if not isinstance(attr, Set): throw(NotImplementedError)
reverse = attr.reverse
if not reverse.is_collection: # many-to-one:
if attr.table is not None: throw(MappingError,
"Parameter 'table' is not allowed for many-to-one attribute %s" % attr)
elif attr.columns: throw(NotImplementedError,