-
Notifications
You must be signed in to change notification settings - Fork 56
/
jsoncodec.py
419 lines (359 loc) · 17.1 KB
/
jsoncodec.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
""" Defines JSON-format encoding and decoding functions """
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
# XXX this module should certainly be rewritten as a custom `json.JSONEncoder`
import types as _types
import importlib as _importlib
import base64 as _base64
import numpy as _np
import uuid as _uuid
import collections as _collections
import pygsti.objects
def class_hasattr(instance, attr):
    """Check whether the *class* of `instance` (not the instance itself) has attribute `attr`."""
    cls = instance.__class__
    return hasattr(cls, attr)
def encode_obj(py_obj, binary):
    """
    Returns JSON-compatible version of `py_obj`.

    Constructs in-memory a JSON-format-compatible copy of the Python object
    `py_obj`, handling pyGSTi objects appropriately.  When `binary=False`,
    the output must contain only ASCII-compatible strings (no 'bytes'),
    otherwise the output is allowed to contain non-ASCII string values (OK for
    binary formats like MSGPACK and BSON).

    Parameters
    ----------
    py_obj : object
        The object to encode.

    binary : bool
        Whether the output is allowed to have binary-mode strings or not.

    Returns
    -------
    object
        A JSON-format compatible object.  Usually a dict, list, or string.
    """
    #print("ENCODING ", str(type(py_obj)))

    # Classify py_obj: a pyGSTi *instance*, a pyGSTi *class* object, or a
    # plotly Figure (which gets special treatment below).
    is_pygsti_obj = hasattr(py_obj, '__class__') and \
        hasattr(py_obj.__class__, '__module__') and \
        py_obj.__class__.__module__.startswith('pygsti')
    is_pygsti_class = isinstance(py_obj, type) and hasattr(py_obj, '__module__') \
        and py_obj.__module__.startswith('pygsti')
    is_plotly_fig = hasattr(py_obj, '__class__') and \
        hasattr(py_obj.__class__, '__module__') and \
        py_obj.__class__.__module__ == 'plotly.graph_objs._figure' and \
        py_obj.__class__.__name__ == "Figure"
    # just needed for v3 plotly where figures aren't dicts...

    # Pygsti class encoding: store module & class name so the class object
    # itself can be re-imported on decode.
    if is_pygsti_class:  # or class_hasattr(py_obj, '__pygsti_getstate__')
        return {'__pygsticlass__': (py_obj.__module__, py_obj.__name__)}

    # Pygsti object encoding
    elif is_pygsti_obj:  # or class_hasattr(py_obj, '__pygsti_getstate__')

        #Get State (and/or init args) -- protocols are tried in priority order:
        # __pygsti_reduce__, __pygsti_getstate__, __getstate__, __dict__, __reduce__
        if class_hasattr(py_obj, '__pygsti_reduce__'):
            red = py_obj.__pygsti_reduce__()  # returns class, constructor_args, state
            assert(red[0] is py_obj.__class__), "No support for weird reducing!"
            init_args = red[1] if len(red) > 1 else []
            state = red[2] if len(red) > 2 else {}
            if state is None: state = {}
            state.update({'__init_args__': init_args})
        elif class_hasattr(py_obj, '__pygsti_getstate__'):
            state = py_obj.__pygsti_getstate__()  # must return a dict
        elif class_hasattr(py_obj, '__getstate__'):
            state = py_obj.__getstate__()
        elif hasattr(py_obj, '__dict__'):
            state = py_obj.__dict__  # take __dict__ as state
        elif class_hasattr(py_obj, '__reduce__'):
            red = py_obj.__reduce__()  # returns class, constructor_args, state
            if red[0] is not py_obj.__class__:
                state = None  # weird reducing can happen, for instance, for namedtuples - just punt
            else:
                init_args = red[1] if len(red) > 1 else []
                state = red[2] if len(red) > 2 else {}
                if state is None: state = {}
                state.update({'__init_args__': init_args})
        else:
            state = None

        if state is None:  # Note: __dict__ and __getstate__ may *return* None (python 2.7)
            if hasattr(py_obj, '_asdict'):  # named tuples
                state = {'__init_args__': list(py_obj._asdict().values())}
                # values will be ordered as per __init__ so no need for keys
            else:
                raise ValueError("Can't get state of %s object" % type(py_obj))

        # Recursively encode every state value.
        d = {k: encode_obj(v, binary) for k, v in state.items()}

        #DEBUG (instead of above line)
        #import json as _json
        #d = {}
        #print("DB: Encoding state for pyGSTi %s object:" % type(py_obj))
        #for k,v in state.items():
        #    print(">>> Encoding key: ",k)
        #    d[k] = encode_obj(v,binary)
        #    print("<<< Done encoding key ",k)
        #    try: _json.dumps(d[k])
        #    except Exception as e:
        #        print("Cannot JSON %s key: " % k, d[k])
        #        raise e

        d.update({'__pygstiobj__': (py_obj.__class__.__module__,
                                    py_obj.__class__.__name__)})

        #Currently, don't add standard-base-class state
        # if we know how to __init__, since we'll assume this
        # should initialize the entire (base class included) instance
        encode_std_base = bool('__init_args__' not in d)

        if encode_std_base:
            std_encode = encode_std_obj(py_obj, binary)
            if std_encode is not py_obj:  # if there's something to encode
                # this pygsti object is also a standard-object instance
                assert(isinstance(std_encode, dict))
                d['__std_base__'] = std_encode

        #try:
        #    _json.dumps(d)
        #except Exception as e:
        #    print("Cannot JSON ",type(py_obj))
        #    raise e

        return d

    #Special case: a plotly Figure object - these need special help being serialized
    elif is_plotly_fig and hasattr(py_obj, 'to_dict'):
        return {'__plotlyfig__': encode_std_obj(py_obj.to_dict(), binary)}

    else:
        return encode_std_obj(py_obj, binary)
def encode_std_obj(py_obj, binary):
    """
    Helper to :func:`encode_obj` that encodes only "standard" (non-pyGSTi) types.

    Parameters
    ----------
    py_obj : object
        The standard-type object to encode.

    binary : bool
        Whether the output is allowed to have binary-mode strings or not.

    Returns
    -------
    object
        A JSON-format compatible object, or `py_obj` itself when no special
        encoding applies (it is then assumed to be directly JSON-able).

    Raises
    ------
    TypeError
        If `py_obj` is a NumPy object-dtype array (no portable byte form).
    """
    # Other builtin or standard object encoding
    #print("Encoding std type: ",str(type(py_obj)))
    if isinstance(py_obj, tuple):
        return {'__tuple__': [encode_obj(v, binary) for v in py_obj]}
    elif isinstance(py_obj, list):
        return {'__list__': [encode_obj(v, binary) for v in py_obj]}
    elif isinstance(py_obj, set):
        return {'__set__': [encode_obj(v, binary) for v in py_obj]}
    elif isinstance(py_obj, slice):
        return {'__slice__': [encode_obj(py_obj.start, binary),
                              encode_obj(py_obj.stop, binary),
                              encode_obj(py_obj.step, binary)]}
    elif isinstance(py_obj, range):
        return {'__range__': (py_obj.start, py_obj.stop, py_obj.step)}
    elif isinstance(py_obj, _collections.OrderedDict):
        # OrderedDict keeps its own marker so decode preserves key order
        return {'__odict__': [(encode_obj(k, binary), encode_obj(v, binary))
                              for k, v in py_obj.items()]}
    elif isinstance(py_obj, _collections.Counter):
        return {'__counter__': [(encode_obj(k, binary), encode_obj(v, binary))
                                for k, v in dict(py_obj).items()]}
    elif isinstance(py_obj, dict):
        return {'__ndict__': [(encode_obj(k, binary), encode_obj(v, binary))
                              for k, v in py_obj.items()]}
    elif isinstance(py_obj, _uuid.UUID):
        return {'__uuid__': str(py_obj.hex)}
    elif isinstance(py_obj, complex):
        rep = py_obj.__repr__()  # a string
        data = tobin(rep) if binary else rep  # binary if need be
        return {'__complex__': data}
    elif not binary and isinstance(py_obj, bytes):
        return {'__bytes__': tostr(_base64.b64encode(py_obj))}
    elif binary and isinstance(py_obj, str):
        return {'__string__': tobin(py_obj)}

    #Numpy encoding
    elif isinstance(py_obj, _np.ndarray):
        # Object arrays have no portable byte representation -- reject them up
        # front.  (Bugfix: `_np.object` was removed in NumPy 1.24, so the old
        # `py_obj.dtype == _np.object` raised AttributeError instead of the
        # intended TypeError; compare against the builtin `object` and check
        # *before* serializing the buffer.)
        if py_obj.dtype == object:
            raise TypeError("Cannot serialize object ndarrays!")
        # If the dtype is structured, store the interface description;
        # otherwise, store the corresponding array protocol type string:
        if py_obj.dtype.kind == 'V':
            kind = 'V'
            descr = tobin(py_obj.dtype.descr) if binary else tostr(py_obj.dtype.descr)
        else:
            kind = ''
            descr = tobin(py_obj.dtype.str) if binary else tostr(py_obj.dtype.str)
        data = py_obj.tobytes() if binary else tostr(_base64.b64encode(py_obj.tobytes()))
        return {'__ndarray__': data,
                'dtype': descr,
                'kind': kind,
                'shape': py_obj.shape}

    elif isinstance(py_obj, (_np.bool_, _np.number)):
        data = py_obj.tobytes() if binary else tostr(_base64.b64encode(py_obj.tobytes()))
        return {'__npgeneric__': data,
                'dtype': tostr(py_obj.dtype.str)}

    elif isinstance(py_obj, _types.FunctionType):  # functions
        # OLD: elif callable(py_obj): #incorrectly includes pygsti classes w/__call__ (e.g. AutoGator)
        return {'__function__': (py_obj.__module__, py_obj.__name__)}

    return py_obj  # assume the bare py_obj is json-able
def decode_obj(json_obj, binary):
    """
    Inverse of :func:`encode_obj` that decodes the JSON-compatible `json_obj`
    object into the original Python object that was encoded.

    Parameters
    ----------
    json_obj : object
        The JSON-compatible object to decode.  Note that this is NOT a JSON
        string, but rather the object that would be decoded from such a string
        (by `json.loads`, for instance).

    binary : bool
        Whether `json_obj` is a binary format or not.  If so, then the decoding
        expects all strings to be binary strings i.e. `b'name'` instead of just
        `'name'`.  The value of this argument should match that used in the
        original call to :func:`encode_obj`.

    Returns
    -------
    object
        A Python object.
    """
    # Marker keys were written as bytes when `binary` -- B() matches that.
    B = tobin if binary else _ident
    if isinstance(json_obj, dict):
        if B('__pygsticlass__') in json_obj:
            # A pyGSTi *class* object: re-import it by module & name.
            modname, clsname = json_obj[B('__pygsticlass__')]
            module = _importlib.import_module(tostr(modname))
            class_ = getattr(module, tostr(clsname))
            return class_

        elif B('__pygstiobj__') in json_obj:
            #DEBUG
            #print("DB: creating %s" % str(json_obj['__pygstiobj__']))
            #print("DB: json_obj is type %s with keyvals:" % type(json_obj))
            #for k,v in json_obj.items():
            #    print("%s (%s): %s (%s)" % (k,type(k),v,type(v)))

            modname, clsname = json_obj[B('__pygstiobj__')]
            module = _importlib.import_module(tostr(modname))
            class_ = getattr(module, tostr(clsname))

            if B('__init_args__') in json_obj:  # construct via __init__
                args = decode_obj(json_obj[B('__init_args__')], binary)
                instance = class_(*args)

            else:  # init via __new__ and set state
                instance = class_.__new__(class_)

            #Create state dict from all remaining (non-bookkeeping) keys
            state_dict = {}
            for k, v in json_obj.items():
                if k in (B('__pygstiobj__'), B('__init_args__'), B('__std_base__')): continue
                state_dict[tostr(k)] = decode_obj(v, binary)

            #Set state -- mirrors the protocol priority used by encode_obj
            if class_hasattr(instance, '__pygsti_setstate__'):
                instance.__pygsti_setstate__(state_dict)
            elif class_hasattr(instance, '__setstate__'):
                instance.__setstate__(state_dict)
            elif hasattr(instance, '__dict__'):  # just update __dict__
                instance.__dict__.update(state_dict)
            elif len(state_dict) > 0:
                raise ValueError("Cannot set nontrivial state of %s object" % type(instance))

            #update instance with std-object info if needed (only if __init__ not called)
            if B('__std_base__') in json_obj:
                decode_std_base(json_obj[B('__std_base__')], instance, binary)

            return instance

        elif B('__plotlyfig__') in json_obj:
            import plotly.graph_objs as go
            return go.Figure(decode_obj(json_obj[B('__plotlyfig__')], binary))

        else:
            return decode_std_obj(json_obj, binary)
    else:
        return json_obj  # plain (non-dict) JSON values decode to themselves
def decode_std_base(json_obj, start, binary):
    """
    Helper to :func:`decode_obj` for decoding pyGSTi objects that are also
    derived from a standard type.

    Parameters
    ----------
    json_obj : dict
        The encoded standard-base-class content (as produced by
        :func:`encode_std_obj`), tagged with a '__list__', '__set__', etc. key.

    start : object
        The freshly created instance whose standard-base-class content is
        filled *in place* from `json_obj`.

    binary : bool
        Whether marker keys are binary strings (must match the encoding).

    Returns
    -------
    None
    """
    # Marker keys were written as bytes when `binary`.
    B = tobin if binary else (lambda x: x)
    if B('__tuple__') in json_obj:
        # Tuples are immutable and cannot be filled in place; this is only OK
        # when '__init_args__' was recorded (e.g. namedtuples), meaning the
        # instance was already fully built via __init__.
        # (Bugfix: the key is '__init_args__' -- the old code checked
        # '__init_args', which never matched, so this always asserted.)
        assert(B('__init_args__') in json_obj), "No support for sub-classing tuple"
    elif B('__list__') in json_obj:
        for v in json_obj[B('__list__')]:
            start.append(decode_obj(v, binary))
    elif B('__set__') in json_obj:
        for v in json_obj[B('__set__')]:
            start.add(decode_obj(v, binary))
    elif B('__ndict__') in json_obj:
        for k, v in json_obj[B('__ndict__')]:
            start[decode_obj(k, binary)] = decode_obj(v, binary)
    elif B('__odict__') in json_obj:
        for k, v in json_obj[B('__odict__')]:
            start[decode_obj(k, binary)] = decode_obj(v, binary)
    elif B('__uuid__') in json_obj:
        assert(False), "No support for sub-classing UUID"
    elif B('__ndarray__') in json_obj:
        assert(False), "No support for sub-classing ndarray"
    elif B('__npgeneric__') in json_obj:
        assert(False), "No support for sub-classing numpy generics"
    elif B('__complex__') in json_obj:
        assert(False), "No support for sub-classing complex"
    elif B('__counter__') in json_obj:
        assert(False), "No support for sub-classing Counter"
    elif B('__slice__') in json_obj:
        assert(False), "No support for sub-classing slice"
def decode_std_obj(json_obj, binary):
    """
    Helper to :func:`decode_obj` that decodes standard (non-pyGSTi) types.

    Parameters
    ----------
    json_obj : dict
        A dict tagged with one of the marker keys written by
        :func:`encode_std_obj` ('__tuple__', '__ndarray__', ...).

    binary : bool
        Whether marker keys/values are binary strings (must match encoding).

    Returns
    -------
    object
        The decoded Python object.  (Historical behavior: returns None when
        no marker key is recognized.)
    """
    # Marker keys were written as bytes when `binary`.
    B = tobin if binary else (lambda x: x)
    if B('__tuple__') in json_obj:
        return tuple([decode_obj(v, binary) for v in json_obj[B('__tuple__')]])
    elif B('__list__') in json_obj:
        return list([decode_obj(v, binary) for v in json_obj[B('__list__')]])
    elif B('__set__') in json_obj:
        return set([decode_obj(v, binary) for v in json_obj[B('__set__')]])
    elif B('__slice__') in json_obj:
        v = json_obj[B('__slice__')]
        return slice(decode_obj(v[0], binary), decode_obj(v[1], binary),
                     decode_obj(v[2], binary))
    elif B('__range__') in json_obj:
        start, stop, step = json_obj[B('__range__')]
        return range(start, stop, step)
    elif B('__ndict__') in json_obj:
        return dict([(decode_obj(k, binary), decode_obj(v, binary))
                     for k, v in json_obj[B('__ndict__')]])
    elif B('__odict__') in json_obj:
        return _collections.OrderedDict(
            [(decode_obj(k, binary), decode_obj(v, binary)) for k, v in json_obj[B('__odict__')]])
    elif B('__counter__') in json_obj:
        return _collections.Counter(
            {decode_obj(k, binary): decode_obj(v, binary) for k, v in json_obj[B('__counter__')]})
    elif B('__uuid__') in json_obj:
        return _uuid.UUID(hex=tostr(json_obj[B('__uuid__')]))
    elif B('__bytes__') in json_obj:
        return json_obj[B('__bytes__')] if binary else \
            _base64.b64decode(json_obj[B('__bytes__')])
    elif B('__string__') in json_obj:
        return tostr(json_obj[B('__string__')]) if binary else \
            json_obj[B('__string__')]

    # check for numpy
    elif B('__ndarray__') in json_obj:
        # Check if 'kind' is in json_obj to enable decoding of data
        # serialized with older versions.  (Bugfix: the old code indexed
        # json_obj[B('kind')] unconditionally, raising KeyError on legacy
        # data that lacks the 'kind' key.)
        kind = json_obj[B('kind')] if B('kind') in json_obj else ''
        if kind == 'V':
            # structured dtype: rebuild the (name, format) descr tuples
            descr = [tuple(tostr(t) if isinstance(t, bytes) else t for t in d)
                     for d in json_obj[B('dtype')]]
        else:
            descr = json_obj[B('dtype')]
        data = json_obj[B('__ndarray__')] if binary else \
            _base64.b64decode(json_obj[B('__ndarray__')])
        # frombuffer replaces the deprecated/removed fromstring; .copy()
        # restores writability (frombuffer returns a read-only view).
        return _np.frombuffer(data, dtype=_np.dtype(descr)).copy().reshape(json_obj[B('shape')])

    elif B('__npgeneric__') in json_obj:
        data = json_obj[B('__npgeneric__')] if binary else \
            _base64.b64decode(json_obj[B('__npgeneric__')])
        return _np.frombuffer(
            data, dtype=_np.dtype(json_obj[B('dtype')])
        )[0]

    elif B('__complex__') in json_obj:
        return complex(tostr(json_obj[B('__complex__')]))
    elif B('__function__') in json_obj:
        modname, fnname = json_obj[B('__function__')]
        module = _importlib.import_module(tostr(modname))
        return getattr(module, tostr(fnname))
def tostr(x):
    """
    Convert a value to the native string format.

    Bytes are UTF-8/default-decoded; anything else goes through `str()`.
    """
    return x.decode() if isinstance(x, bytes) else str(x)
def tobin(x):
    """
    Serialize strings to UTF8 bytes; non-strings pass through unchanged.
    """
    return x.encode('utf-8') if isinstance(x, str) else x
def _ident(x):
return x