From 1c72aef8d28df5951da14024fca6e610aacae542 Mon Sep 17 00:00:00 2001
From: Dennis Lutter
Date: Tue, 13 Dec 2011 21:29:29 +0100
Subject: [PATCH 1/9] Handle getbanner and getposter while chaining

While chaining, the normal output of these cmds is replaced with an error
response.
---
 sickbeard/webapi.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index a6ad3f77e5..60bc43ade4 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -221,15 +221,18 @@ def call_dispatcher(args, kwargs):
                 cmd, cmdIndex = cmd.split("_") # this gives us the clear cmd and the index
 
             logger.log(u"API :: " + cmd + ": curKwargs " + str(curKwargs), logger.DEBUG)
-            try:
-                if cmd in _functionMaper:
-                    curOutDict = _functionMaper.get(cmd)(curArgs, curKwargs).run() # get the cmd class, init it and run()
-                elif _is_int(cmd):
-                    curOutDict = TVDBShorthandWrapper(curArgs, curKwargs, cmd).run()
-                else:
-                    curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
-            except ApiError, e: # Api errors that we raised, they are harmless
-                curOutDict = _responds(RESULT_ERROR, msg=ex(e))
+            if not (multiCmds and cmd in ('show.getposter','show.getbanner')): # skip these cmds while chaining
+                try:
+                    if cmd in _functionMaper:
+                        curOutDict = _functionMaper.get(cmd)(curArgs, curKwargs).run() # get the cmd class, init it and run()
+                    elif _is_int(cmd):
+                        curOutDict = TVDBShorthandWrapper(curArgs, curKwargs, cmd).run()
+                    else:
+                        curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
+                except ApiError, e: # Api errors that we raised, they are harmless
+                    curOutDict = _responds(RESULT_ERROR, msg=ex(e))
+            else: # if someone chained one of the forbidden cmds they will get an error for this one cmd
+                curOutDict = _responds(RESULT_ERROR, msg="The cmd '"+cmd+"' is not supported while chaining")
 
         if multiCmds:
             # note: if multiple same cmds are issued but one has not an index defined it will override all others

From de3a00c594f09a910e86797ec4e0aab30cba5d9e Mon Sep 17 00:00:00 2001
From: daften
Date: Sat, 17 Dec 2011 00:43:02 +0100
Subject: [PATCH 2/9] Using the Python json library instead of simplejson

---
 sickbeard/notifiers/trakt.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/sickbeard/notifiers/trakt.py b/sickbeard/notifiers/trakt.py
index 1a5dcf5f42..eb469ef7d5 100644
--- a/sickbeard/notifiers/trakt.py
+++ b/sickbeard/notifiers/trakt.py
@@ -21,7 +21,7 @@
 from hashlib import sha1
 
 
-from lib import simplejson as json
+import json
 
 import sickbeard
 
@@ -78,8 +78,6 @@ def _use_me(self):
 
     def _notifyTrakt(self, method, api, username, password, data = {}):
         logger.log("trakt_notifier: Call method " + method, logger.DEBUG)
-        json._toggle_speedups(False)
-
         if not api:
             api = self._api()
         if not username:
@@ -104,7 +102,7 @@ def _notifyTrakt(self, method, api, username, password, data = {}):
 
             if ("error" in resp):
                 raise Exception(resp["error"])
-        except (IOError, json.JSONDecodeError):
+        except (IOError):
             logger.log("trakt_notifier: Failed calling method", logger.ERROR)
             return False
 

From d50c790e79db66993a5eb41246a6a74a11a51682 Mon Sep 17 00:00:00 2001
From: daften
Date: Sat, 17 Dec 2011 00:44:41 +0100
Subject: [PATCH 3/9] Revert "Updated simplejson to latest version"

This reverts commit c9b46b8e51bbd0a2499b84a020dd7c54d52bce3d.
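
Note on the except clause change in PATCH 2/9 above: Python 2's standard-library
json module has no JSONDecodeError; malformed input raises ValueError instead, so
with the narrowed "except (IOError):" clause a decode failure will now propagate
out of _notifyTrakt. A minimal standalone sketch of the stdlib behaviour
(illustrative only, not part of the patch):

    import json

    try:
        json.loads("not valid json")
    except ValueError, e:  # Python 2's json module reports decode errors as ValueError
        print "decode failed: %s" % e
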
--- lib/simplejson/__init__.py | 264 +++--------- lib/simplejson/_speedups.c | 767 +++++++++------------------------ lib/simplejson/decoder.py | 169 +++----- lib/simplejson/encoder.py | 164 ++----- lib/simplejson/ordered_dict.py | 119 ----- lib/simplejson/scanner.py | 24 +- lib/simplejson/tool.py | 39 -- 7 files changed, 359 insertions(+), 1187 deletions(-) mode change 100755 => 100644 lib/simplejson/__init__.py mode change 100755 => 100644 lib/simplejson/_speedups.c mode change 100755 => 100644 lib/simplejson/decoder.py mode change 100755 => 100644 lib/simplejson/encoder.py delete mode 100755 lib/simplejson/ordered_dict.py mode change 100755 => 100644 lib/simplejson/scanner.py delete mode 100755 lib/simplejson/tool.py diff --git a/lib/simplejson/__init__.py b/lib/simplejson/__init__.py old mode 100755 new mode 100644 index ae4a39b9f9..d5b4d39913 --- a/lib/simplejson/__init__.py +++ b/lib/simplejson/__init__.py @@ -37,7 +37,7 @@ Pretty printing:: >>> import simplejson as json - >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ') + >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4) >>> print '\n'.join([l.rstrip() for l in s.splitlines()]) { "4": 5, @@ -68,8 +68,8 @@ >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', ... object_hook=as_complex) (1+2j) - >>> from decimal import Decimal - >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') + >>> import decimal + >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1') True Specializing JSON object encoding:: @@ -97,34 +97,16 @@ $ echo '{ 1.2:3.4}' | python -m simplejson.tool Expecting property name: line 1 column 2 (char 2) """ -__version__ = '2.2.1' +__version__ = '2.0.9' __all__ = [ 'dump', 'dumps', 'load', 'loads', - 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', - 'OrderedDict', + 'JSONDecoder', 'JSONEncoder', ] __author__ = 'Bob Ippolito ' -from decimal import Decimal - -from decoder import JSONDecoder, JSONDecodeError +from decoder import JSONDecoder from encoder import JSONEncoder -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import ordered_dict - return ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() - -def _import_c_make_encoder(): - try: - from simplejson._speedups import make_encoder - return make_encoder - except ImportError: - return None _default_encoder = JSONEncoder( skipkeys=False, @@ -135,16 +117,11 @@ def _import_c_make_encoder(): separators=None, encoding='utf-8', default=None, - use_decimal=True, - namedtuple_as_object=True, - tuple_as_array=True, ) def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, - encoding='utf-8', default=None, use_decimal=True, - namedtuple_as_object=True, tuple_as_array=True, - **kw): + encoding='utf-8', default=None, **kw): """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a ``.write()``-supporting file-like object). @@ -167,12 +144,9 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, in strict compliance of the JSON specification, instead of using the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - If *indent* is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. 
For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. + If ``indent`` is a non-negative integer, then JSON array elements and object + members will be pretty-printed with that indent level. An indent level + of 0 will only insert newlines. ``None`` is the most compact representation. If ``separators`` is an ``(item_separator, dict_separator)`` tuple then it will be used instead of the default ``(', ', ': ')`` separators. @@ -183,16 +157,6 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. - If *use_decimal* is true (default: ``True``) then decimal.Decimal - will be natively serialized to JSON with full precision. - - If *namedtuple_as_object* is true (default: ``True``), - :class:`tuple` subclasses with ``_asdict()`` methods will be encoded - as JSON objects. - - If *tuple_as_array* is true (default: ``True``), - :class:`tuple` (and subclasses) will be encoded as JSON arrays. - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the ``.default()`` method to serialize additional types), specify it with the ``cls`` kwarg. @@ -202,8 +166,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, if (not skipkeys and ensure_ascii and check_circular and allow_nan and cls is None and indent is None and separators is None and - encoding == 'utf-8' and default is None and use_decimal - and namedtuple_as_object and tuple_as_array and not kw): + encoding == 'utf-8' and default is None and not kw): iterable = _default_encoder.iterencode(obj) else: if cls is None: @@ -211,10 +174,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, indent=indent, separators=separators, encoding=encoding, - default=default, use_decimal=use_decimal, - namedtuple_as_object=namedtuple_as_object, - tuple_as_array=tuple_as_array, - **kw).iterencode(obj) + default=default, **kw).iterencode(obj) # could accelerate with writelines in some versions of Python, at # a debuggability cost for chunk in iterable: @@ -223,10 +183,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, - encoding='utf-8', default=None, use_decimal=True, - namedtuple_as_object=True, - tuple_as_array=True, - **kw): + encoding='utf-8', default=None, **kw): """Serialize ``obj`` to a JSON formatted ``str``. If ``skipkeys`` is false then ``dict`` keys that are not basic types @@ -246,12 +203,10 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, strict compliance of the JSON specification, instead of using the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - If ``indent`` is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. 
+ If ``indent`` is a non-negative integer, then JSON array elements and + object members will be pretty-printed with that indent level. An indent + level of 0 will only insert newlines. ``None`` is the most compact + representation. If ``separators`` is an ``(item_separator, dict_separator)`` tuple then it will be used instead of the default ``(', ', ': ')`` separators. @@ -262,16 +217,6 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. - If *use_decimal* is true (default: ``True``) then decimal.Decimal - will be natively serialized to JSON with full precision. - - If *namedtuple_as_object* is true (default: ``True``), - :class:`tuple` subclasses with ``_asdict()`` methods will be encoded - as JSON objects. - - If *tuple_as_array* is true (default: ``True``), - :class:`tuple` (and subclasses) will be encoded as JSON arrays. - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the ``.default()`` method to serialize additional types), specify it with the ``cls`` kwarg. @@ -281,8 +226,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, if (not skipkeys and ensure_ascii and check_circular and allow_nan and cls is None and indent is None and separators is None and - encoding == 'utf-8' and default is None and use_decimal - and namedtuple_as_object and tuple_as_array and not kw): + encoding == 'utf-8' and default is None and not kw): return _default_encoder.encode(obj) if cls is None: cls = JSONEncoder @@ -290,61 +234,28 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, indent=indent, separators=separators, encoding=encoding, default=default, - use_decimal=use_decimal, - namedtuple_as_object=namedtuple_as_object, - tuple_as_array=tuple_as_array, **kw).encode(obj) -_default_decoder = JSONDecoder(encoding=None, object_hook=None, - object_pairs_hook=None) +_default_decoder = JSONDecoder(encoding=None, object_hook=None) def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, namedtuple_as_object=True, tuple_as_array=True, - **kw): + parse_int=None, parse_constant=None, **kw): """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document) to a Python object. - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. - - Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. - - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. This can be used to provide custom - deserializations (e.g. to support JSON-RPC class hinting). - - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. 
This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - If *use_decimal* is true (default: ``False``) then it implies - parse_float=decimal.Decimal for parity with ``dump``. + If the contents of ``fp`` is encoded with an ASCII based encoding other + than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must + be specified. Encodings that are not ASCII based (such as UCS-2) are + not allowed, and should be wrapped with + ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode`` + object and passed to ``loads()`` + + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. @@ -353,54 +264,38 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, return loads(fp.read(), encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, - parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, - use_decimal=use_decimal, **kw) + parse_constant=parse_constant, **kw) def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, **kw): + parse_int=None, parse_constant=None, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. - - Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. - - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. This can be used to provide custom - deserializations (e.g. to support JSON-RPC class hinting). - - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. 
This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - If *use_decimal* is true (default: ``False``) then it implies - parse_float=decimal.Decimal for parity with ``dump``. + If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding + other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name + must be specified. Encodings that are not ASCII based (such as UCS-2) + are not allowed and should be decoded to ``unicode`` first. + + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). + + ``parse_float``, if specified, will be called with the string + of every JSON float to be decoded. By default this is equivalent to + float(num_str). This can be used to use another datatype or parser + for JSON floats (e.g. decimal.Decimal). + + ``parse_int``, if specified, will be called with the string + of every JSON int to be decoded. By default this is equivalent to + int(num_str). This can be used to use another datatype or parser + for JSON integers (e.g. float). + + ``parse_constant``, if specified, will be called with one of the + following strings: -Infinity, Infinity, NaN, null, true, false. + This can be used to raise an exception if invalid JSON numbers + are encountered. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. 
@@ -408,59 +303,16 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, """ if (cls is None and encoding is None and object_hook is None and parse_int is None and parse_float is None and - parse_constant is None and object_pairs_hook is None - and not use_decimal and not kw): + parse_constant is None and not kw): return _default_decoder.decode(s) if cls is None: cls = JSONDecoder if object_hook is not None: kw['object_hook'] = object_hook - if object_pairs_hook is not None: - kw['object_pairs_hook'] = object_pairs_hook if parse_float is not None: kw['parse_float'] = parse_float if parse_int is not None: kw['parse_int'] = parse_int if parse_constant is not None: kw['parse_constant'] = parse_constant - if use_decimal: - if parse_float is not None: - raise TypeError("use_decimal=True implies parse_float=Decimal") - kw['parse_float'] = Decimal return cls(encoding=encoding, **kw).decode(s) - - -def _toggle_speedups(enabled): - import lib.simplejson.decoder as dec - import lib.simplejson.encoder as enc - import lib.simplejson.scanner as scan - c_make_encoder = _import_c_make_encoder() - if enabled: - dec.scanstring = dec.c_scanstring or dec.py_scanstring - enc.c_make_encoder = c_make_encoder - enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or - enc.py_encode_basestring_ascii) - scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner - else: - dec.scanstring = dec.py_scanstring - enc.c_make_encoder = None - enc.encode_basestring_ascii = enc.py_encode_basestring_ascii - scan.make_scanner = scan.py_make_scanner - dec.make_scanner = scan.make_scanner - global _default_decoder - _default_decoder = JSONDecoder( - encoding=None, - object_hook=None, - object_pairs_hook=None, - ) - global _default_encoder - _default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - ) diff --git a/lib/simplejson/_speedups.c b/lib/simplejson/_speedups.c old mode 100755 new mode 100644 index f8b0565866..23b5f4a6e6 --- a/lib/simplejson/_speedups.c +++ b/lib/simplejson/_speedups.c @@ -1,26 +1,8 @@ #include "Python.h" #include "structmember.h" -#if PY_VERSION_HEX < 0x02070000 && !defined(PyOS_string_to_double) -#define PyOS_string_to_double json_PyOS_string_to_double -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception); -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) { - double x; - assert(endptr == NULL); - assert(overflow_exception == NULL); - PyFPE_START_PROTECT("json_PyOS_string_to_double", return -1.0;) - x = PyOS_ascii_atof(s); - PyFPE_END_PROTECT(x) - return x; -} -#endif #if PY_VERSION_HEX < 0x02060000 && !defined(Py_TYPE) #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) #endif -#if PY_VERSION_HEX < 0x02060000 && !defined(Py_SIZE) -#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) -#endif #if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) typedef int Py_ssize_t; #define PY_SSIZE_T_MAX INT_MAX @@ -44,29 +26,24 @@ typedef int Py_ssize_t; #define PyScanner_CheckExact(op) (Py_TYPE(op) == &PyScannerType) #define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType) #define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType) -#define Decimal_Check(op) (PyObject_TypeCheck(op, DecimalTypePtr)) static PyTypeObject PyScannerType; static PyTypeObject PyEncoderType; -static PyTypeObject *DecimalTypePtr; typedef struct _PyScannerObject { 
PyObject_HEAD PyObject *encoding; PyObject *strict; PyObject *object_hook; - PyObject *pairs_hook; PyObject *parse_float; PyObject *parse_int; PyObject *parse_constant; - PyObject *memo; } PyScannerObject; static PyMemberDef scanner_members[] = { {"encoding", T_OBJECT, offsetof(PyScannerObject, encoding), READONLY, "encoding"}, {"strict", T_OBJECT, offsetof(PyScannerObject, strict), READONLY, "strict"}, {"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"}, - {"object_pairs_hook", T_OBJECT, offsetof(PyScannerObject, pairs_hook), READONLY, "object_pairs_hook"}, {"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"}, {"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"}, {"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"}, @@ -83,12 +60,8 @@ typedef struct _PyEncoderObject { PyObject *item_separator; PyObject *sort_keys; PyObject *skipkeys; - PyObject *key_memo; int fast_encode; int allow_nan; - int use_decimal; - int namedtuple_as_object; - int tuple_as_array; } PyEncoderObject; static PyMemberDef encoder_members[] = { @@ -100,7 +73,6 @@ static PyMemberDef encoder_members[] = { {"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"}, {"sort_keys", T_OBJECT, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"}, {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys), READONLY, "skipkeys"}, - {"key_memo", T_OBJECT, offsetof(PyEncoderObject, key_memo), READONLY, "key_memo"}, {NULL} }; @@ -142,7 +114,7 @@ encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssi static int encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level); static PyObject * -_encoded_const(PyObject *obj); +_encoded_const(PyObject *const); static void raise_errmsg(char *msg, PyObject *s, Py_ssize_t end); static PyObject * @@ -153,8 +125,6 @@ static PyObject * _convertPyInt_FromSsize_t(Py_ssize_t *size_ptr); static PyObject * encoder_encode_float(PyEncoderObject *s, PyObject *obj); -static int -_is_namedtuple(PyObject *obj); #define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"') #define IS_WHITESPACE(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\n') || ((c) == '\r')) @@ -166,20 +136,14 @@ _is_namedtuple(PyObject *obj); #define MAX_EXPANSION MIN_EXPANSION #endif -static int -_is_namedtuple(PyObject *obj) -{ - return PyTuple_Check(obj) && PyObject_HasAttrString(obj, "_asdict"); -} - static int _convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr) { /* PyObject to Py_ssize_t converter */ *size_ptr = PyInt_AsSsize_t(o); - if (*size_ptr == -1 && PyErr_Occurred()) - return 0; - return 1; + if (*size_ptr == -1 && PyErr_Occurred()); + return 1; + return 0; } static PyObject * @@ -377,21 +341,21 @@ raise_errmsg(char *msg, PyObject *s, Py_ssize_t end) { /* Use the Python function simplejson.decoder.errmsg to raise a nice looking ValueError exception */ - static PyObject *JSONDecodeError = NULL; - PyObject *exc; - if (JSONDecodeError == NULL) { + static PyObject *errmsg_fn = NULL; + PyObject *pymsg; + if (errmsg_fn == NULL) { PyObject *decoder = PyImport_ImportModule("simplejson.decoder"); if (decoder == NULL) return; - JSONDecodeError = PyObject_GetAttrString(decoder, "JSONDecodeError"); + errmsg_fn = PyObject_GetAttrString(decoder, "errmsg"); Py_DECREF(decoder); - if (JSONDecodeError == NULL) + if (errmsg_fn == NULL) return; } - exc = 
PyObject_CallFunction(JSONDecodeError, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end); - if (exc) { - PyErr_SetObject(JSONDecodeError, exc); - Py_DECREF(exc); + pymsg = PyObject_CallFunction(errmsg_fn, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end); + if (pymsg) { + PyErr_SetObject(PyExc_ValueError, pymsg); + Py_DECREF(pymsg); } } @@ -458,20 +422,6 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) { return tpl; } -#define APPEND_OLD_CHUNK \ - if (chunk != NULL) { \ - if (chunks == NULL) { \ - chunks = PyList_New(0); \ - if (chunks == NULL) { \ - goto bail; \ - } \ - } \ - if (PyList_Append(chunks, chunk)) { \ - goto bail; \ - } \ - Py_CLEAR(chunk); \ - } - static PyObject * scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr) { @@ -490,19 +440,18 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s Py_ssize_t next = begin; int has_unicode = 0; char *buf = PyString_AS_STRING(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - - if (len == end) { - raise_errmsg("Unterminated string starting at", pystr, begin); + PyObject *chunks = PyList_New(0); + if (chunks == NULL) { + goto bail; } - else if (end < 0 || len < end) { + if (end < 0 || len <= end) { PyErr_SetString(PyExc_ValueError, "end is out of bounds"); goto bail; } while (1) { /* Find the end of the string or the next escape */ Py_UNICODE c = 0; + PyObject *chunk = NULL; for (next = end; next < len; next++) { c = (unsigned char)buf[next]; if (c == '"' || c == '\\') { @@ -522,9 +471,7 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s } /* Pick up this chunk if it's not zero length */ if (next != end) { - PyObject *strchunk; - APPEND_OLD_CHUNK - strchunk = PyString_FromStringAndSize(&buf[end], next - end); + PyObject *strchunk = PyString_FromStringAndSize(&buf[end], next - end); if (strchunk == NULL) { goto bail; } @@ -538,6 +485,11 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s else { chunk = strchunk; } + if (PyList_Append(chunks, chunk)) { + Py_DECREF(chunk); + goto bail; + } + Py_DECREF(chunk); } next++; if (c == '"') { @@ -642,7 +594,6 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s if (c > 0x7f) { has_unicode = 1; } - APPEND_OLD_CHUNK if (has_unicode) { chunk = PyUnicode_FromUnicode(&c, 1); if (chunk == NULL) { @@ -656,28 +607,22 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s goto bail; } } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else - rval = PyString_FromStringAndSize("", 0); - } - else { - APPEND_OLD_CHUNK - rval = join_list_string(chunks); - if (rval == NULL) { + if (PyList_Append(chunks, chunk)) { + Py_DECREF(chunk); goto bail; } - Py_CLEAR(chunks); + Py_DECREF(chunk); } + rval = join_list_string(chunks); + if (rval == NULL) { + goto bail; + } + Py_CLEAR(chunks); *next_end_ptr = end; return rval; bail: *next_end_ptr = -1; - Py_XDECREF(chunk); Py_XDECREF(chunks); return NULL; } @@ -699,19 +644,18 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next Py_ssize_t begin = end - 1; Py_ssize_t next = begin; const Py_UNICODE *buf = PyUnicode_AS_UNICODE(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - - if (len == end) { - raise_errmsg("Unterminated string starting at", pystr, begin); + PyObject *chunks = PyList_New(0); + if (chunks == NULL) { + goto bail; } - else if (end < 0 || len < end) { + if (end < 0 || len <= 
end) { PyErr_SetString(PyExc_ValueError, "end is out of bounds"); goto bail; } while (1) { /* Find the end of the string or the next escape */ Py_UNICODE c = 0; + PyObject *chunk = NULL; for (next = end; next < len; next++) { c = buf[next]; if (c == '"' || c == '\\') { @@ -728,11 +672,15 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next } /* Pick up this chunk if it's not zero length */ if (next != end) { - APPEND_OLD_CHUNK chunk = PyUnicode_FromUnicode(&buf[end], next - end); if (chunk == NULL) { goto bail; } + if (PyList_Append(chunks, chunk)) { + Py_DECREF(chunk); + goto bail; + } + Py_DECREF(chunk); } next++; if (c == '"') { @@ -834,32 +782,26 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next } #endif } - APPEND_OLD_CHUNK chunk = PyUnicode_FromUnicode(&c, 1); if (chunk == NULL) { goto bail; } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else - rval = PyUnicode_FromUnicode(NULL, 0); - } - else { - APPEND_OLD_CHUNK - rval = join_list_unicode(chunks); - if (rval == NULL) { + if (PyList_Append(chunks, chunk)) { + Py_DECREF(chunk); goto bail; } - Py_CLEAR(chunks); + Py_DECREF(chunk); + } + + rval = join_list_unicode(chunks); + if (rval == NULL) { + goto bail; } + Py_DECREF(chunks); *next_end_ptr = end; return rval; bail: *next_end_ptr = -1; - Py_XDECREF(chunk); Py_XDECREF(chunks); return NULL; } @@ -949,11 +891,9 @@ scanner_traverse(PyObject *self, visitproc visit, void *arg) Py_VISIT(s->encoding); Py_VISIT(s->strict); Py_VISIT(s->object_hook); - Py_VISIT(s->pairs_hook); Py_VISIT(s->parse_float); Py_VISIT(s->parse_int); Py_VISIT(s->parse_constant); - Py_VISIT(s->memo); return 0; } @@ -966,11 +906,9 @@ scanner_clear(PyObject *self) Py_CLEAR(s->encoding); Py_CLEAR(s->strict); Py_CLEAR(s->object_hook); - Py_CLEAR(s->pairs_hook); Py_CLEAR(s->parse_float); Py_CLEAR(s->parse_int); Py_CLEAR(s->parse_constant); - Py_CLEAR(s->memo); return 0; } @@ -981,30 +919,18 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ *next_idx_ptr is a return-by-reference index to the first character after the closing curly brace. 
- Returns a new PyObject (usually a dict, but object_hook or - object_pairs_hook can change that) + Returns a new PyObject (usually a dict, but object_hook can change that) */ char *str = PyString_AS_STRING(pystr); Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; + PyObject *rval = PyDict_New(); PyObject *key = NULL; PyObject *val = NULL; char *encoding = PyString_AS_STRING(s->encoding); int strict = PyObject_IsTrue(s->strict); - int has_pairs_hook = (s->pairs_hook != Py_None); Py_ssize_t next_idx; - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } + if (rval == NULL) + return NULL; /* skip whitespace after { */ while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; @@ -1012,8 +938,6 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ /* only loop if the object is non-empty */ if (idx <= end_idx && str[idx] != '}') { while (idx <= end_idx) { - PyObject *memokey; - /* read key */ if (str[idx] != '"') { raise_errmsg("Expecting property name", pystr, idx); @@ -1022,16 +946,6 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ key = scanstring_str(pystr, idx + 1, encoding, strict, &next_idx); if (key == NULL) goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } idx = next_idx; /* skip whitespace between key and : delimiter, read :, skip whitespace */ @@ -1048,24 +962,11 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ if (val == NULL) goto bail; - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } + if (PyDict_SetItem(rval, key, val) == -1) + goto bail; + + Py_CLEAR(key); + Py_CLEAR(val); idx = next_idx; /* skip whitespace before } or , */ @@ -1091,17 +992,6 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ raise_errmsg("Expecting object", pystr, end_idx); goto bail; } - - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - /* if object_hook is not None: rval = object_hook(rval) */ if (s->object_hook != Py_None) { val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); @@ -1114,10 +1004,9 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ *next_idx_ptr = idx + 1; return rval; bail: - Py_XDECREF(rval); Py_XDECREF(key); Py_XDECREF(val); - Py_XDECREF(pairs); + Py_DECREF(rval); return NULL; } @@ -1132,34 +1021,20 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss */ Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1; - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; - PyObject *key = NULL; PyObject *val = NULL; + PyObject *rval = PyDict_New(); + PyObject *key = NULL; int strict = PyObject_IsTrue(s->strict); - int 
has_pairs_hook = (s->pairs_hook != Py_None); Py_ssize_t next_idx; + if (rval == NULL) + return NULL; - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } - /* skip whitespace after { */ while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; /* only loop if the object is non-empty */ if (idx <= end_idx && str[idx] != '}') { while (idx <= end_idx) { - PyObject *memokey; - /* read key */ if (str[idx] != '"') { raise_errmsg("Expecting property name", pystr, idx); @@ -1168,16 +1043,6 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss key = scanstring_unicode(pystr, idx + 1, strict, &next_idx); if (key == NULL) goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } idx = next_idx; /* skip whitespace between key and : delimiter, read :, skip whitespace */ @@ -1194,24 +1059,11 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss if (val == NULL) goto bail; - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } + if (PyDict_SetItem(rval, key, val) == -1) + goto bail; + + Py_CLEAR(key); + Py_CLEAR(val); idx = next_idx; /* skip whitespace before } or , */ @@ -1239,16 +1091,6 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss goto bail; } - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - /* if object_hook is not None: rval = object_hook(rval) */ if (s->object_hook != Py_None) { val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); @@ -1261,10 +1103,9 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss *next_idx_ptr = idx + 1; return rval; bail: - Py_XDECREF(rval); Py_XDECREF(key); Py_XDECREF(val); - Py_XDECREF(pairs); + Py_DECREF(rval); return NULL; } @@ -1294,13 +1135,8 @@ _parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t /* read any JSON term and de-tuplefy the (rval, idx) */ val = scan_once_str(s, pystr, idx, &next_idx); - if (val == NULL) { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - PyErr_Clear(); - raise_errmsg("Expecting object", pystr, idx); - } + if (val == NULL) goto bail; - } if (PyList_Append(rval, val) == -1) goto bail; @@ -1366,13 +1202,8 @@ _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssi /* read any JSON term */ val = scan_once_unicode(s, pystr, idx, &next_idx); - if (val == NULL) { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - PyErr_Clear(); - raise_errmsg("Expecting object", pystr, idx); - } + if (val == NULL) goto bail; - } if (PyList_Append(rval, val) == -1) goto bail; @@ -1519,12 +1350,7 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); } else { - /* rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); */ 
- double d = PyOS_string_to_double(PyString_AS_STRING(numstr), - NULL, NULL); - if (d == -1.0 && PyErr_Occurred()) - return NULL; - rval = PyFloat_FromDouble(d); + rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); } } else { @@ -1587,7 +1413,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') { is_float = 1; idx += 2; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; + while (idx < end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; } /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ @@ -1644,92 +1470,68 @@ scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *n */ char *str = PyString_AS_STRING(pystr); Py_ssize_t length = PyString_GET_SIZE(pystr); - PyObject *rval = NULL; - int fallthrough = 0; if (idx >= length) { PyErr_SetNone(PyExc_StopIteration); return NULL; } - if (Py_EnterRecursiveCall(" while decoding a JSON document")) - return NULL; switch (str[idx]) { case '"': /* string */ - rval = scanstring_str(pystr, idx + 1, + return scanstring_str(pystr, idx + 1, PyString_AS_STRING(s->encoding), PyObject_IsTrue(s->strict), next_idx_ptr); - break; case '{': /* object */ - rval = _parse_object_str(s, pystr, idx + 1, next_idx_ptr); - break; + return _parse_object_str(s, pystr, idx + 1, next_idx_ptr); case '[': /* array */ - rval = _parse_array_str(s, pystr, idx + 1, next_idx_ptr); - break; + return _parse_array_str(s, pystr, idx + 1, next_idx_ptr); case 'n': /* null */ if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') { Py_INCREF(Py_None); *next_idx_ptr = idx + 4; - rval = Py_None; + return Py_None; } - else - fallthrough = 1; break; case 't': /* true */ if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') { Py_INCREF(Py_True); *next_idx_ptr = idx + 4; - rval = Py_True; + return Py_True; } - else - fallthrough = 1; break; case 'f': /* false */ if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') { Py_INCREF(Py_False); *next_idx_ptr = idx + 5; - rval = Py_False; + return Py_False; } - else - fallthrough = 1; break; case 'N': /* NaN */ if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') { - rval = _parse_constant(s, "NaN", idx, next_idx_ptr); + return _parse_constant(s, "NaN", idx, next_idx_ptr); } - else - fallthrough = 1; break; case 'I': /* Infinity */ if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') { - rval = _parse_constant(s, "Infinity", idx, next_idx_ptr); + return _parse_constant(s, "Infinity", idx, next_idx_ptr); } - else - fallthrough = 1; break; case '-': /* -Infinity */ if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') { - rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr); + return _parse_constant(s, "-Infinity", idx, next_idx_ptr); } - else - fallthrough = 1; break; - default: - fallthrough = 1; } /* Didn't find a string, object, array, or named constant. Look for a number. 
*/ - if (fallthrough) - rval = _match_number_str(s, pystr, idx, next_idx_ptr); - Py_LeaveRecursiveCall(); - return rval; + return _match_number_str(s, pystr, idx, next_idx_ptr); } static PyObject * @@ -1744,91 +1546,67 @@ scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_ */ Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); Py_ssize_t length = PyUnicode_GET_SIZE(pystr); - PyObject *rval = NULL; - int fallthrough = 0; if (idx >= length) { PyErr_SetNone(PyExc_StopIteration); return NULL; } - if (Py_EnterRecursiveCall(" while decoding a JSON document")) - return NULL; switch (str[idx]) { case '"': /* string */ - rval = scanstring_unicode(pystr, idx + 1, + return scanstring_unicode(pystr, idx + 1, PyObject_IsTrue(s->strict), next_idx_ptr); - break; case '{': /* object */ - rval = _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr); - break; + return _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr); case '[': /* array */ - rval = _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr); - break; + return _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr); case 'n': /* null */ if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') { Py_INCREF(Py_None); *next_idx_ptr = idx + 4; - rval = Py_None; + return Py_None; } - else - fallthrough = 1; break; case 't': /* true */ if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') { Py_INCREF(Py_True); *next_idx_ptr = idx + 4; - rval = Py_True; + return Py_True; } - else - fallthrough = 1; break; case 'f': /* false */ if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') { Py_INCREF(Py_False); *next_idx_ptr = idx + 5; - rval = Py_False; + return Py_False; } - else - fallthrough = 1; break; case 'N': /* NaN */ if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') { - rval = _parse_constant(s, "NaN", idx, next_idx_ptr); + return _parse_constant(s, "NaN", idx, next_idx_ptr); } - else - fallthrough = 1; break; case 'I': /* Infinity */ if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') { - rval = _parse_constant(s, "Infinity", idx, next_idx_ptr); + return _parse_constant(s, "Infinity", idx, next_idx_ptr); } - else - fallthrough = 1; break; case '-': /* -Infinity */ if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') { - rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr); + return _parse_constant(s, "-Infinity", idx, next_idx_ptr); } - else - fallthrough = 1; break; - default: - fallthrough = 1; } /* Didn't find a string, object, array, or named constant. Look for a number. 
*/ - if (fallthrough) - rval = _match_number_unicode(s, pystr, idx, next_idx_ptr); - Py_LeaveRecursiveCall(); - return rval; + return _match_number_unicode(s, pystr, idx, next_idx_ptr); } static PyObject * @@ -1858,7 +1636,6 @@ scanner_call(PyObject *self, PyObject *args, PyObject *kwds) Py_TYPE(pystr)->tp_name); return NULL; } - PyDict_Clear(s->memo); return _build_rval_index_tuple(rval, next_idx); } @@ -1871,7 +1648,6 @@ scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds) s->encoding = NULL; s->strict = NULL; s->object_hook = NULL; - s->pairs_hook = NULL; s->parse_float = NULL; s->parse_int = NULL; s->parse_constant = NULL; @@ -1892,17 +1668,9 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds) if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:make_scanner", kwlist, &ctx)) return -1; - - if (s->memo == NULL) { - s->memo = PyDict_New(); - if (s->memo == NULL) - goto bail; - } /* PyString_AS_STRING is used on encoding */ s->encoding = PyObject_GetAttrString(ctx, "encoding"); - if (s->encoding == NULL) - goto bail; if (s->encoding == Py_None) { Py_DECREF(Py_None); s->encoding = PyString_InternFromString(DEFAULT_ENCODING); @@ -1922,9 +1690,6 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds) s->object_hook = PyObject_GetAttrString(ctx, "object_hook"); if (s->object_hook == NULL) goto bail; - s->pairs_hook = PyObject_GetAttrString(ctx, "object_pairs_hook"); - if (s->pairs_hook == NULL) - goto bail; s->parse_float = PyObject_GetAttrString(ctx, "parse_float"); if (s->parse_float == NULL) goto bail; @@ -1941,7 +1706,6 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds) Py_CLEAR(s->encoding); Py_CLEAR(s->strict); Py_CLEAR(s->object_hook); - Py_CLEAR(s->pairs_hook); Py_CLEAR(s->parse_float); Py_CLEAR(s->parse_int); Py_CLEAR(s->parse_constant); @@ -2008,7 +1772,6 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) s->item_separator = NULL; s->sort_keys = NULL; s->skipkeys = NULL; - s->key_memo = NULL; } return (PyObject *)s; } @@ -2017,36 +1780,18 @@ static int encoder_init(PyObject *self, PyObject *args, PyObject *kwds) { /* initialize Encoder object */ - static char *kwlist[] = {"markers", "default", "encoder", "indent", "key_separator", "item_separator", "sort_keys", "skipkeys", "allow_nan", "key_memo", "use_decimal", "namedtuple_as_object", "tuple_as_array", NULL}; + static char *kwlist[] = {"markers", "default", "encoder", "indent", "key_separator", "item_separator", "sort_keys", "skipkeys", "allow_nan", NULL}; PyEncoderObject *s; - PyObject *markers, *defaultfn, *encoder, *indent, *key_separator; - PyObject *item_separator, *sort_keys, *skipkeys, *allow_nan, *key_memo, *use_decimal, *namedtuple_as_object, *tuple_as_array; + PyObject *allow_nan; assert(PyEncoder_Check(self)); s = (PyEncoderObject *)self; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOOOOOO:make_encoder", kwlist, - &markers, &defaultfn, &encoder, &indent, &key_separator, &item_separator, - &sort_keys, &skipkeys, &allow_nan, &key_memo, &use_decimal, - &namedtuple_as_object, &tuple_as_array)) + if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOO:make_encoder", kwlist, + &s->markers, &s->defaultfn, &s->encoder, &s->indent, &s->key_separator, &s->item_separator, &s->sort_keys, &s->skipkeys, &allow_nan)) return -1; - s->markers = markers; - s->defaultfn = defaultfn; - s->encoder = encoder; - s->indent = indent; - s->key_separator = key_separator; - s->item_separator = item_separator; - s->sort_keys = sort_keys; - s->skipkeys = skipkeys; - s->key_memo = 
key_memo; - s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii); - s->allow_nan = PyObject_IsTrue(allow_nan); - s->use_decimal = PyObject_IsTrue(use_decimal); - s->namedtuple_as_object = PyObject_IsTrue(namedtuple_as_object); - s->tuple_as_array = PyObject_IsTrue(tuple_as_array); - Py_INCREF(s->markers); Py_INCREF(s->defaultfn); Py_INCREF(s->encoder); @@ -2055,7 +1800,8 @@ encoder_init(PyObject *self, PyObject *args, PyObject *kwds) Py_INCREF(s->item_separator); Py_INCREF(s->sort_keys); Py_INCREF(s->skipkeys); - Py_INCREF(s->key_memo); + s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii); + s->allow_nan = PyObject_IsTrue(allow_nan); return 0; } @@ -2164,91 +1910,79 @@ static int encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level) { /* Encode Python object obj to a JSON term, rval is a PyList */ - int rv = -1; - if (Py_EnterRecursiveCall(" while encoding a JSON document")) - return rv; - do { - if (obj == Py_None || obj == Py_True || obj == Py_False) { - PyObject *cstr = _encoded_const(obj); - if (cstr != NULL) - rv = _steal_list_append(rval, cstr); - } - else if (PyString_Check(obj) || PyUnicode_Check(obj)) - { - PyObject *encoded = encoder_encode_string(s, obj); - if (encoded != NULL) - rv = _steal_list_append(rval, encoded); - } - else if (PyInt_Check(obj) || PyLong_Check(obj)) { - PyObject *encoded = PyObject_Str(obj); - if (encoded != NULL) - rv = _steal_list_append(rval, encoded); - } - else if (PyFloat_Check(obj)) { - PyObject *encoded = encoder_encode_float(s, obj); - if (encoded != NULL) - rv = _steal_list_append(rval, encoded); - } - else if (s->namedtuple_as_object && _is_namedtuple(obj)) { - PyObject *newobj = PyObject_CallMethod(obj, "_asdict", NULL); - if (newobj != NULL) { - rv = encoder_listencode_dict(s, rval, newobj, indent_level); - Py_DECREF(newobj); - } - } - else if (PyList_Check(obj) || (s->tuple_as_array && PyTuple_Check(obj))) { - rv = encoder_listencode_list(s, rval, obj, indent_level); - } - else if (PyDict_Check(obj)) { - rv = encoder_listencode_dict(s, rval, obj, indent_level); + PyObject *newobj; + int rv; + + if (obj == Py_None || obj == Py_True || obj == Py_False) { + PyObject *cstr = _encoded_const(obj); + if (cstr == NULL) + return -1; + return _steal_list_append(rval, cstr); + } + else if (PyString_Check(obj) || PyUnicode_Check(obj)) + { + PyObject *encoded = encoder_encode_string(s, obj); + if (encoded == NULL) + return -1; + return _steal_list_append(rval, encoded); + } + else if (PyInt_Check(obj) || PyLong_Check(obj)) { + PyObject *encoded = PyObject_Str(obj); + if (encoded == NULL) + return -1; + return _steal_list_append(rval, encoded); + } + else if (PyFloat_Check(obj)) { + PyObject *encoded = encoder_encode_float(s, obj); + if (encoded == NULL) + return -1; + return _steal_list_append(rval, encoded); + } + else if (PyList_Check(obj) || PyTuple_Check(obj)) { + return encoder_listencode_list(s, rval, obj, indent_level); + } + else if (PyDict_Check(obj)) { + return encoder_listencode_dict(s, rval, obj, indent_level); + } + else { + PyObject *ident = NULL; + if (s->markers != Py_None) { + int has_key; + ident = PyLong_FromVoidPtr(obj); + if (ident == NULL) + return -1; + has_key = PyDict_Contains(s->markers, ident); + if (has_key) { + if (has_key != -1) + PyErr_SetString(PyExc_ValueError, "Circular reference detected"); + Py_DECREF(ident); + return 
-1; + } + if (PyDict_SetItem(s->markers, ident, obj)) { + Py_DECREF(ident); + return -1; + } + } + newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL); + if (newobj == NULL) { + Py_XDECREF(ident); + return -1; } - else if (s->use_decimal && Decimal_Check(obj)) { - PyObject *encoded = PyObject_Str(obj); - if (encoded != NULL) - rv = _steal_list_append(rval, encoded); + rv = encoder_listencode_obj(s, rval, newobj, indent_level); + Py_DECREF(newobj); + if (rv) { + Py_XDECREF(ident); + return -1; } - else { - PyObject *ident = NULL; - PyObject *newobj; - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(obj); - if (ident == NULL) - break; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - Py_DECREF(ident); - break; - } - if (PyDict_SetItem(s->markers, ident, obj)) { - Py_DECREF(ident); - break; - } - } - newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL); - if (newobj == NULL) { - Py_XDECREF(ident); - break; - } - rv = encoder_listencode_obj(s, rval, newobj, indent_level); - Py_DECREF(newobj); - if (rv) { - Py_XDECREF(ident); - rv = -1; - } - else if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) { - Py_XDECREF(ident); - rv = -1; - } + if (ident != NULL) { + if (PyDict_DelItem(s->markers, ident)) { Py_XDECREF(ident); + return -1; } + Py_XDECREF(ident); } - } while (0); - Py_LeaveRecursiveCall(); - return rv; + return rv; + } } static int @@ -2258,22 +1992,18 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss static PyObject *open_dict = NULL; static PyObject *close_dict = NULL; static PyObject *empty_dict = NULL; - static PyObject *iteritems = NULL; PyObject *kstr = NULL; PyObject *ident = NULL; - PyObject *iter = NULL; - PyObject *item = NULL; - PyObject *items = NULL; - PyObject *encoded = NULL; + PyObject *key, *value; + Py_ssize_t pos; int skipkeys; Py_ssize_t idx; - if (open_dict == NULL || close_dict == NULL || empty_dict == NULL || iteritems == NULL) { + if (open_dict == NULL || close_dict == NULL || empty_dict == NULL) { open_dict = PyString_InternFromString("{"); close_dict = PyString_InternFromString("}"); empty_dict = PyString_InternFromString("{}"); - iteritems = PyString_InternFromString("iteritems"); - if (open_dict == NULL || close_dict == NULL || empty_dict == NULL || iteritems == NULL) + if (open_dict == NULL || close_dict == NULL || empty_dict == NULL) return -1; } if (PyDict_Size(dct) == 0) @@ -2302,72 +2032,21 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss /* TODO: DOES NOT RUN */ indent_level += 1; /* - newline_indent = '\n' + (_indent * _current_indent_level) + newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) separator = _item_separator + newline_indent buf += newline_indent */ } - if (PyObject_IsTrue(s->sort_keys)) { - /* First sort the keys then replace them with (key, value) tuples. 
*/ - Py_ssize_t i, nitems; - if (PyDict_CheckExact(dct)) - items = PyDict_Keys(dct); - else - items = PyMapping_Keys(dct); - if (items == NULL) - goto bail; - if (!PyList_Check(items)) { - PyErr_SetString(PyExc_ValueError, "keys must return list"); - goto bail; - } - if (PyList_Sort(items) < 0) - goto bail; - nitems = PyList_GET_SIZE(items); - for (i = 0; i < nitems; i++) { - PyObject *key, *value; - key = PyList_GET_ITEM(items, i); - value = PyDict_GetItem(dct, key); - item = PyTuple_Pack(2, key, value); - if (item == NULL) - goto bail; - PyList_SET_ITEM(items, i, item); - Py_DECREF(key); - } - } - else { - if (PyDict_CheckExact(dct)) - items = PyDict_Items(dct); - else - items = PyMapping_Items(dct); - } - if (items == NULL) - goto bail; - iter = PyObject_GetIter(items); - Py_DECREF(items); - if (iter == NULL) - goto bail; + /* TODO: C speedup not implemented for sort_keys */ + pos = 0; skipkeys = PyObject_IsTrue(s->skipkeys); idx = 0; - while ((item = PyIter_Next(iter))) { - PyObject *encoded, *key, *value; - if (!PyTuple_Check(item) || Py_SIZE(item) != 2) { - PyErr_SetString(PyExc_ValueError, "items must return 2-tuples"); - goto bail; - } - key = PyTuple_GET_ITEM(item, 0); - if (key == NULL) - goto bail; - value = PyTuple_GET_ITEM(item, 1); - if (value == NULL) - goto bail; + while (PyDict_Next(dct, &pos, &key, &value)) { + PyObject *encoded; - encoded = PyDict_GetItem(s->key_memo, key); - if (encoded != NULL) { - Py_INCREF(encoded); - } - else if (PyString_Check(key) || PyUnicode_Check(key)) { + if (PyString_Check(key) || PyUnicode_Check(key)) { Py_INCREF(key); kstr = key; } @@ -2376,25 +2055,22 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss if (kstr == NULL) goto bail; } - else if (key == Py_True || key == Py_False || key == Py_None) { - /* This must come before the PyInt_Check because - True and False are also 1 and 0.*/ - kstr = _encoded_const(key); + else if (PyInt_Check(key) || PyLong_Check(key)) { + kstr = PyObject_Str(key); if (kstr == NULL) goto bail; } - else if (PyInt_Check(key) || PyLong_Check(key)) { - kstr = PyObject_Str(key); + else if (key == Py_True || key == Py_False || key == Py_None) { + kstr = _encoded_const(key); if (kstr == NULL) goto bail; } else if (skipkeys) { - Py_DECREF(item); continue; } else { /* TODO: include repr of key */ - PyErr_SetString(PyExc_TypeError, "keys must be a string"); + PyErr_SetString(PyExc_ValueError, "keys must be a string"); goto bail; } @@ -2403,28 +2079,21 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss goto bail; } - if (encoded == NULL) { - encoded = encoder_encode_string(s, kstr); - Py_CLEAR(kstr); - if (encoded == NULL) - goto bail; - if (PyDict_SetItem(s->key_memo, key, encoded)) - goto bail; - } + encoded = encoder_encode_string(s, kstr); + Py_CLEAR(kstr); + if (encoded == NULL) + goto bail; if (PyList_Append(rval, encoded)) { + Py_DECREF(encoded); goto bail; } - Py_CLEAR(encoded); + Py_DECREF(encoded); if (PyList_Append(rval, s->key_separator)) goto bail; if (encoder_listencode_obj(s, rval, value, indent_level)) goto bail; - Py_CLEAR(item); idx += 1; } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; if (ident != NULL) { if (PyDict_DelItem(s->markers, ident)) goto bail; @@ -2434,7 +2103,7 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss /* TODO: DOES NOT RUN */ indent_level -= 1; /* - yield '\n' + (_indent * _current_indent_level) + yield '\n' + (' ' * (_indent * _current_indent_level)) */ } if 
(PyList_Append(rval, close_dict)) @@ -2442,9 +2111,6 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss return 0; bail: - Py_XDECREF(encoded); - Py_XDECREF(items); - Py_XDECREF(iter); Py_XDECREF(kstr); Py_XDECREF(ident); return -1; @@ -2459,10 +2125,10 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss static PyObject *close_array = NULL; static PyObject *empty_array = NULL; PyObject *ident = NULL; - PyObject *iter = NULL; - PyObject *obj = NULL; - int is_true; - int i = 0; + PyObject *s_fast = NULL; + Py_ssize_t num_items; + PyObject **seq_items; + Py_ssize_t i; if (open_array == NULL || close_array == NULL || empty_array == NULL) { open_array = PyString_InternFromString("["); @@ -2472,11 +2138,14 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss return -1; } ident = NULL; - is_true = PyObject_IsTrue(seq); - if (is_true == -1) + s_fast = PySequence_Fast(seq, "_iterencode_list needs a sequence"); + if (s_fast == NULL) return -1; - else if (is_true == 0) + num_items = PySequence_Fast_GET_SIZE(s_fast); + if (num_items == 0) { + Py_DECREF(s_fast); return PyList_Append(rval, empty_array); + } if (s->markers != Py_None) { int has_key; @@ -2494,34 +2163,27 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss } } - iter = PyObject_GetIter(seq); - if (iter == NULL) - goto bail; - + seq_items = PySequence_Fast_ITEMS(s_fast); if (PyList_Append(rval, open_array)) goto bail; if (s->indent != Py_None) { /* TODO: DOES NOT RUN */ indent_level += 1; /* - newline_indent = '\n' + (_indent * _current_indent_level) + newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) separator = _item_separator + newline_indent buf += newline_indent */ } - while ((obj = PyIter_Next(iter))) { + for (i = 0; i < num_items; i++) { + PyObject *obj = seq_items[i]; if (i) { if (PyList_Append(rval, s->item_separator)) goto bail; } if (encoder_listencode_obj(s, rval, obj, indent_level)) goto bail; - i++; - Py_CLEAR(obj); } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; if (ident != NULL) { if (PyDict_DelItem(s->markers, ident)) goto bail; @@ -2531,17 +2193,17 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss /* TODO: DOES NOT RUN */ indent_level -= 1; /* - yield '\n' + (_indent * _current_indent_level) + yield '\n' + (' ' * (_indent * _current_indent_level)) */ } if (PyList_Append(rval, close_array)) goto bail; + Py_DECREF(s_fast); return 0; bail: - Py_XDECREF(obj); - Py_XDECREF(iter); Py_XDECREF(ident); + Py_DECREF(s_fast); return -1; } @@ -2567,7 +2229,6 @@ encoder_traverse(PyObject *self, visitproc visit, void *arg) Py_VISIT(s->item_separator); Py_VISIT(s->sort_keys); Py_VISIT(s->skipkeys); - Py_VISIT(s->key_memo); return 0; } @@ -2586,7 +2247,6 @@ encoder_clear(PyObject *self) Py_CLEAR(s->item_separator); Py_CLEAR(s->sort_keys); Py_CLEAR(s->skipkeys); - Py_CLEAR(s->key_memo); return 0; } @@ -2654,22 +2314,13 @@ PyDoc_STRVAR(module_doc, void init_speedups(void) { - PyObject *m, *decimal; + PyObject *m; PyScannerType.tp_new = PyType_GenericNew; if (PyType_Ready(&PyScannerType) < 0) return; PyEncoderType.tp_new = PyType_GenericNew; if (PyType_Ready(&PyEncoderType) < 0) return; - - decimal = PyImport_ImportModule("decimal"); - if (decimal == NULL) - return; - DecimalTypePtr = (PyTypeObject*)PyObject_GetAttrString(decimal, "Decimal"); - Py_DECREF(decimal); - if (DecimalTypePtr == NULL) - return; - m = Py_InitModule3("_speedups", speedups_methods, 
module_doc); Py_INCREF((PyObject*)&PyScannerType); PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType); diff --git a/lib/simplejson/decoder.py b/lib/simplejson/decoder.py old mode 100755 new mode 100644 index 0702385771..dd57ddeefb --- a/lib/simplejson/decoder.py +++ b/lib/simplejson/decoder.py @@ -5,13 +5,10 @@ import struct from lib.simplejson.scanner import make_scanner -def _import_c_scanstring(): - try: - from simplejson._speedups import scanstring - return scanstring - except ImportError: - return None -c_scanstring = _import_c_scanstring() +try: + from lib.simplejson._speedups import scanstring as c_scanstring +except ImportError: + c_scanstring = None __all__ = ['JSONDecoder'] @@ -19,8 +16,6 @@ def _import_c_scanstring(): def _floatconstants(): _BYTES = '7FF80000000000007FF0000000000000'.decode('hex') - # The struct module in Python 2.4 would get frexp() out of range here - # when an endian is specified in the format string. Fixed in Python 2.5+ if sys.byteorder != 'big': _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] nan, inf = struct.unpack('dd', _BYTES) @@ -29,32 +24,6 @@ def _floatconstants(): NaN, PosInf, NegInf = _floatconstants() -class JSONDecodeError(ValueError): - """Subclass of ValueError with the following additional properties: - - msg: The unformatted error message - doc: The JSON document being parsed - pos: The start index of doc where parsing failed - end: The end index of doc where parsing failed (may be None) - lineno: The line corresponding to pos - colno: The column corresponding to pos - endlineno: The line corresponding to end (may be None) - endcolno: The column corresponding to end (may be None) - - """ - def __init__(self, msg, doc, pos, end=None): - ValueError.__init__(self, errmsg(msg, doc, pos, end=end)) - self.msg = msg - self.doc = doc - self.pos = pos - self.end = end - self.lineno, self.colno = linecol(doc, pos) - if end is not None: - self.endlineno, self.endcolno = linecol(doc, end) - else: - self.endlineno, self.endcolno = None, None - - def linecol(doc, pos): lineno = doc.count('\n', 0, pos) + 1 if lineno == 1: @@ -93,14 +62,13 @@ def errmsg(msg, doc, pos, end=None): DEFAULT_ENCODING = "utf-8" -def py_scanstring(s, end, encoding=None, strict=True, - _b=BACKSLASH, _m=STRINGCHUNK.match): +def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match): """Scan the string s for a JSON string. End is the index of the character in s after the quote that started the JSON string. Unescapes all valid JSON string escape sequences and raises ValueError on attempt to decode an invalid string. If strict is False then literal control characters are allowed in the string. 
- + Returns a tuple of the decoded string and the index of the character in s after the end quote.""" if encoding is None: @@ -111,8 +79,8 @@ def py_scanstring(s, end, encoding=None, strict=True, while 1: chunk = _m(s, end) if chunk is None: - raise JSONDecodeError( - "Unterminated string starting at", s, begin) + raise ValueError( + errmsg("Unterminated string starting at", s, begin)) end = chunk.end() content, terminator = chunk.groups() # Content is contains zero or more unescaped string characters @@ -128,22 +96,22 @@ def py_scanstring(s, end, encoding=None, strict=True, if strict: msg = "Invalid control character %r at" % (terminator,) #msg = "Invalid control character {0!r} at".format(terminator) - raise JSONDecodeError(msg, s, end) + raise ValueError(errmsg(msg, s, end)) else: _append(terminator) continue try: esc = s[end] except IndexError: - raise JSONDecodeError( - "Unterminated string starting at", s, begin) + raise ValueError( + errmsg("Unterminated string starting at", s, begin)) # If not a unicode escape sequence, must be in the lookup table if esc != 'u': try: char = _b[esc] except KeyError: msg = "Invalid \\escape: " + repr(esc) - raise JSONDecodeError(msg, s, end) + raise ValueError(errmsg(msg, s, end)) end += 1 else: # Unicode escape sequence @@ -151,16 +119,16 @@ def py_scanstring(s, end, encoding=None, strict=True, next_end = end + 5 if len(esc) != 4: msg = "Invalid \\uXXXX escape" - raise JSONDecodeError(msg, s, end) + raise ValueError(errmsg(msg, s, end)) uni = int(esc, 16) # Check for surrogate pair on UCS-4 systems if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535: msg = "Invalid \\uXXXX\\uXXXX surrogate pair" if not s[end + 5:end + 7] == '\\u': - raise JSONDecodeError(msg, s, end) + raise ValueError(errmsg(msg, s, end)) esc2 = s[end + 7:end + 11] if len(esc2) != 4: - raise JSONDecodeError(msg, s, end) + raise ValueError(errmsg(msg, s, end)) uni2 = int(esc2, 16) uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) next_end += 6 @@ -177,14 +145,8 @@ def py_scanstring(s, end, encoding=None, strict=True, WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) WHITESPACE_STR = ' \t\n\r' -def JSONObject((s, end), encoding, strict, scan_once, object_hook, - object_pairs_hook, memo=None, - _w=WHITESPACE.match, _ws=WHITESPACE_STR): - # Backwards compatibility - if memo is None: - memo = {} - memo_get = memo.setdefault - pairs = [] +def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR): + pairs = {} # Use a slice to prevent IndexError from being raised, the following # check will raise a more specific ValueError if the string is empty nextchar = s[end:end + 1] @@ -195,26 +157,19 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, nextchar = s[end:end + 1] # Trivial empty object if nextchar == '}': - if object_pairs_hook is not None: - result = object_pairs_hook(pairs) - return result, end + 1 - pairs = {} - if object_hook is not None: - pairs = object_hook(pairs) return pairs, end + 1 elif nextchar != '"': - raise JSONDecodeError("Expecting property name", s, end) + raise ValueError(errmsg("Expecting property name", s, end)) end += 1 while True: key, end = scanstring(s, end, encoding, strict) - key = memo_get(key, key) # To skip some function call overhead we optimize the fast paths where # the JSON key separator is ": " or just ":". 
if s[end:end + 1] != ':': end = _w(s, end).end() if s[end:end + 1] != ':': - raise JSONDecodeError("Expecting : delimiter", s, end) + raise ValueError(errmsg("Expecting : delimiter", s, end)) end += 1 @@ -229,8 +184,8 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, try: value, end = scan_once(s, end) except StopIteration: - raise JSONDecodeError("Expecting object", s, end) - pairs.append((key, value)) + raise ValueError(errmsg("Expecting object", s, end)) + pairs[key] = value try: nextchar = s[end] @@ -244,7 +199,7 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, if nextchar == '}': break elif nextchar != ',': - raise JSONDecodeError("Expecting , delimiter", s, end - 1) + raise ValueError(errmsg("Expecting , delimiter", s, end - 1)) try: nextchar = s[end] @@ -259,12 +214,8 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, end += 1 if nextchar != '"': - raise JSONDecodeError("Expecting property name", s, end - 1) + raise ValueError(errmsg("Expecting property name", s, end - 1)) - if object_pairs_hook is not None: - result = object_pairs_hook(pairs) - return result, end - pairs = dict(pairs) if object_hook is not None: pairs = object_hook(pairs) return pairs, end @@ -283,7 +234,7 @@ def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): try: value, end = scan_once(s, end) except StopIteration: - raise JSONDecodeError("Expecting object", s, end) + raise ValueError(errmsg("Expecting object", s, end)) _append(value) nextchar = s[end:end + 1] if nextchar in _ws: @@ -293,7 +244,7 @@ def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): if nextchar == ']': break elif nextchar != ',': - raise JSONDecodeError("Expecting , delimiter", s, end) + raise ValueError(errmsg("Expecting , delimiter", s, end)) try: if s[end] in _ws: @@ -336,54 +287,37 @@ class JSONDecoder(object): """ def __init__(self, encoding=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, strict=True, - object_pairs_hook=None): - """ - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. + parse_int=None, parse_constant=None, strict=True): + """``encoding`` determines the encoding used to interpret any ``str`` + objects decoded by this instance (utf-8 by default). It has no + effect when decoding ``unicode`` objects. Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. + strings of other encodings should be passed in as ``unicode``. - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. This can be used to provide custom + ``object_hook``, if specified, will be called with the result + of every JSON object decoded and its return value will be used in + place of the given ``dict``. This can be used to provide custom deserializations (e.g. to support JSON-RPC class hinting). - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. 
This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - *strict* controls the parser's behavior when it encounters an - invalid control character in a string. The default setting of - ``True`` means that unescaped control characters are parse errors, if - ``False`` then control characters will be allowed in strings. + ``parse_float``, if specified, will be called with the string + of every JSON float to be decoded. By default this is equivalent to + float(num_str). This can be used to use another datatype or parser + for JSON floats (e.g. decimal.Decimal). + + ``parse_int``, if specified, will be called with the string + of every JSON int to be decoded. By default this is equivalent to + int(num_str). This can be used to use another datatype or parser + for JSON integers (e.g. float). + + ``parse_constant``, if specified, will be called with one of the + following strings: -Infinity, Infinity, NaN. + This can be used to raise an exception if invalid JSON numbers + are encountered. """ self.encoding = encoding self.object_hook = object_hook - self.object_pairs_hook = object_pairs_hook self.parse_float = parse_float or float self.parse_int = parse_int or int self.parse_constant = parse_constant or _CONSTANTS.__getitem__ @@ -391,7 +325,6 @@ def __init__(self, encoding=None, object_hook=None, parse_float=None, self.parse_object = JSONObject self.parse_array = JSONArray self.parse_string = scanstring - self.memo = {} self.scan_once = make_scanner(self) def decode(self, s, _w=WHITESPACE.match): @@ -402,12 +335,12 @@ def decode(self, s, _w=WHITESPACE.match): obj, end = self.raw_decode(s, idx=_w(s, 0).end()) end = _w(s, end).end() if end != len(s): - raise JSONDecodeError("Extra data", s, end, len(s)) + raise ValueError(errmsg("Extra data", s, end, len(s))) return obj def raw_decode(self, s, idx=0): - """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` - beginning with a JSON document) and return a 2-tuple of the Python + """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning + with a JSON document) and return a 2-tuple of the Python representation and the index in ``s`` where the document ended. 
This can be used to decode a JSON document from a string that may @@ -417,5 +350,5 @@ def raw_decode(self, s, idx=0): try: obj, end = self.scan_once(s, idx) except StopIteration: - raise JSONDecodeError("No JSON object could be decoded", s, idx) + raise ValueError("No JSON object could be decoded") return obj, end diff --git a/lib/simplejson/encoder.py b/lib/simplejson/encoder.py old mode 100755 new mode 100644 index 08a986b6e4..15c35f7a1b --- a/lib/simplejson/encoder.py +++ b/lib/simplejson/encoder.py @@ -1,19 +1,17 @@ """Implementation of JSONEncoder """ import re -from decimal import Decimal -def _import_speedups(): - try: - from simplejson import _speedups - return _speedups.encode_basestring_ascii, _speedups.make_encoder - except ImportError: - return None, None -c_encode_basestring_ascii, c_make_encoder = _import_speedups() +try: + from lib.simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii +except ImportError: + c_encode_basestring_ascii = None +try: + from lib.simplejson._speedups import make_encoder as c_make_encoder +except ImportError: + c_make_encoder = None -from lib.simplejson.decoder import PosInf - -ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]') +ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]') ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') HAS_UTF8 = re.compile(r'[\x80-\xff]') ESCAPE_DCT = { @@ -24,24 +22,22 @@ def _import_speedups(): '\n': '\\n', '\r': '\\r', '\t': '\\t', - u'\u2028': '\\u2028', - u'\u2029': '\\u2029', } for i in range(0x20): #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) +# Assume this produces an infinity on all machines (probably not guaranteed) +INFINITY = float('1e66666') FLOAT_REPR = repr def encode_basestring(s): """Return a JSON representation of a Python string """ - if isinstance(s, str) and HAS_UTF8.search(s) is not None: - s = s.decode('utf-8') def replace(match): return ESCAPE_DCT[match.group(0)] - return u'"' + ESCAPE.sub(replace, s) + u'"' + return '"' + ESCAPE.sub(replace, s) + '"' def py_encode_basestring_ascii(s): @@ -69,8 +65,7 @@ def replace(match): return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' -encode_basestring_ascii = ( - c_encode_basestring_ascii or py_encode_basestring_ascii) +encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii class JSONEncoder(object): """Extensible JSON encoder for Python data structures. @@ -80,7 +75,7 @@ class JSONEncoder(object): +-------------------+---------------+ | Python | JSON | +===================+===============+ - | dict, namedtuple | object | + | dict | object | +-------------------+---------------+ | list, tuple | array | +-------------------+---------------+ @@ -105,9 +100,7 @@ class JSONEncoder(object): key_separator = ': ' def __init__(self, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, - indent=None, separators=None, encoding='utf-8', default=None, - use_decimal=True, namedtuple_as_object=True, - tuple_as_array=True): + indent=None, separators=None, encoding='utf-8', default=None): """Constructor for JSONEncoder, with sensible defaults. If skipkeys is false, then it is a TypeError to attempt @@ -132,12 +125,10 @@ def __init__(self, skipkeys=False, ensure_ascii=True, sorted by key; this is useful for regression tests to ensure that JSON serializations can be compared on a day-to-day basis. 
- If indent is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. + If indent is a non-negative integer, then JSON array + elements and object members will be pretty-printed with that + indent level. An indent level of 0 will only insert newlines. + None is the most compact representation. If specified, separators should be a (item_separator, key_separator) tuple. The default is (', ', ': '). To get the most compact JSON @@ -151,15 +142,6 @@ def __init__(self, skipkeys=False, ensure_ascii=True, transformed into unicode using that encoding prior to JSON-encoding. The default is UTF-8. - If use_decimal is true (not the default), ``decimal.Decimal`` will - be supported directly by the encoder. For the inverse, decode JSON - with ``parse_float=decimal.Decimal``. - - If namedtuple_as_object is true (the default), tuple subclasses with - ``_asdict()`` methods will be encoded as JSON objects. - - If tuple_as_array is true (the default), tuple (and subclasses) will - be encoded as JSON arrays. """ self.skipkeys = skipkeys @@ -167,16 +149,9 @@ def __init__(self, skipkeys=False, ensure_ascii=True, self.check_circular = check_circular self.allow_nan = allow_nan self.sort_keys = sort_keys - self.use_decimal = use_decimal - self.namedtuple_as_object = namedtuple_as_object - self.tuple_as_array = tuple_as_array - if isinstance(indent, (int, long)): - indent = ' ' * indent self.indent = indent if separators is not None: self.item_separator, self.key_separator = separators - elif indent is not None: - self.item_separator = ',' if default is not None: self.default = default self.encoding = encoding @@ -204,7 +179,6 @@ def default(self, o): def encode(self, o): """Return a JSON string representation of a Python data structure. - >>> from simplejson import JSONEncoder >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) '{"foo": ["bar", "baz"]}' @@ -226,10 +200,7 @@ def encode(self, o): chunks = self.iterencode(o, _one_shot=True) if not isinstance(chunks, (list, tuple)): chunks = list(chunks) - if self.ensure_ascii: - return ''.join(chunks) - else: - return u''.join(chunks) + return ''.join(chunks) def iterencode(self, o, _one_shot=False): """Encode the given object and yield each string @@ -255,11 +226,9 @@ def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): o = o.decode(_encoding) return _orig_encoder(o) - def floatstr(o, allow_nan=self.allow_nan, - _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf): - # Check for specials. Note that this type of test is processor - # and/or platform-specific, so do tests which don't depend on - # the internals. + def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY): + # Check for specials. Note that this type of test is processor- and/or + # platform-specific, so do tests which don't depend on the internals. 
if o != o: text = 'NaN' @@ -278,62 +247,24 @@ def floatstr(o, allow_nan=self.allow_nan, return text - key_memo = {} - if (_one_shot and c_make_encoder is not None - and self.indent is None): + if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys: _iterencode = c_make_encoder( markers, self.default, _encoder, self.indent, self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, self.allow_nan, key_memo, self.use_decimal, - self.namedtuple_as_object, self.tuple_as_array) + self.skipkeys, self.allow_nan) else: _iterencode = _make_iterencode( markers, self.default, _encoder, self.indent, floatstr, self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, _one_shot, self.use_decimal, - self.namedtuple_as_object, self.tuple_as_array) - try: - return _iterencode(o, 0) - finally: - key_memo.clear() - + self.skipkeys, _one_shot) + return _iterencode(o, 0) -class JSONEncoderForHTML(JSONEncoder): - """An encoder that produces JSON safe to embed in HTML. - - To embed JSON content in, say, a script tag on a web page, the - characters &, < and > should be escaped. They cannot be escaped - with the usual entities (e.g. &) because they are not expanded - within diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 60bc43ade4..0e8e2c99fb 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -67,7 +67,7 @@ class Api: """ api class that returns json results """ - version = 0.1 + version = 0.2 intent = 4 @cherrypy.expose @@ -1917,6 +1917,38 @@ def run(self): return {'outputType': 'image', 'image': webserve.WebInterface().showPoster(self.tvdbid, 'banner')} +class CMD_ShowPause(ApiCall): + _help = {"desc": "set a show's paused state in sickbeard", + "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, + }, + "optionalPramameters": {"pause": {"desc": "set the pause state of the show"} + } + } + + def __init__(self, args, kwargs): + # required + self.tvdbid, args = self.check_params(args, kwargs, "tvdbid", None, True, "int", []) + # optional + self.pause, args = self.check_params(args, kwargs, "pause", 0, False, "bool", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """ set a show's paused state in sickbeard """ + showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid)) + if not showObj: + return _responds(RESULT_FAILURE, msg="Show not found") + + if self.pause == True: + showObj.paused = 1 + return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has been paused") + else: + showObj.paused = 0 + return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has been unpaused") + + return _responds(RESULT_FAILURE, msg=str(showObj.name) + " was unable to be paused") + + class CMD_ShowRefresh(ApiCall): _help = {"desc": "refresh a show in sickbeard", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, @@ -2356,6 +2388,7 @@ def run(self): "show.getquality": CMD_ShowGetQuality, "show.getposter": CMD_ShowGetPoster, "show.getbanner": CMD_ShowGetBanner, + "show.pause": CMD_ShowPause, "show.refresh": CMD_ShowRefresh, "show.seasonlist": CMD_ShowSeasonList, "show.seasons": CMD_ShowSeasons, From e474871d1fa18706bd566e34c4e3e5b5b47c27aa Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Tue, 3 Jan 2012 17:00:29 -0600 Subject: [PATCH 6/9] Source code cleanup, PEP8 fixes --- sickbeard/webapi.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 
0e8e2c99fb..62a5ce1d51 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -115,7 +115,7 @@ def default(self, *args, **kwargs): outputCallback = outputCallbackDict[outDict['outputType']] else: outputCallback = outputCallbackDict['default'] - + return outputCallback(outDict) @cherrypy.expose @@ -221,7 +221,7 @@ def call_dispatcher(args, kwargs): cmd, cmdIndex = cmd.split("_") # this gives us the clear cmd and the index logger.log(u"API :: " + cmd + ": curKwargs " + str(curKwargs), logger.DEBUG) - if not (multiCmds and cmd in ('show.getposter','show.getbanner')): # skip these cmd while chaining + if not (multiCmds and cmd in ('show.getposter', 'show.getbanner')): # skip these cmd while chaining try: if cmd in _functionMaper: curOutDict = _functionMaper.get(cmd)(curArgs, curKwargs).run() # get the cmd class, init it and run() @@ -232,7 +232,7 @@ def call_dispatcher(args, kwargs): except ApiError, e: # Api errors that we raised, they are harmless curOutDict = _responds(RESULT_ERROR, msg=ex(e)) else: # if someone chained one of the forbiden cmds they will get an error for this one cmd - curOutDict = _responds(RESULT_ERROR, msg="The cmd '"+cmd+"' is not supported while chaining") + curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining") if multiCmds: # note: if multiple same cmds are issued but one has not an index defined it will override all others @@ -602,6 +602,7 @@ def _getQualityMap(): Quality.UNKNOWN: 'unknown', ANY: 'any'} + def _getRootDirs(): if sickbeard.ROOT_DIRS == "": return {} @@ -641,6 +642,7 @@ def _getRootDirs(): return dir_list + class ApiError(Exception): "Generic API error" @@ -1251,11 +1253,11 @@ def run(self): root_dirs.pop(0) # clean up the list - replace %xx escapes by their single-character equivalent root_dirs = [urllib.unquote_plus(x) for x in root_dirs] - old_root_dir = root_dirs[index]; + old_root_dir = root_dirs[index] for curRootDir in root_dirs: if not curRootDir == self.location: root_dirs_new.append(curRootDir) - else: # + else: newIndex = 0 for curIndex, curNewRootDir in enumerate(root_dirs_new): @@ -2369,7 +2371,7 @@ def run(self): "sb": CMD_SickBeard, "sb.addrootdir": CMD_SickBeardAddRootDir, "sb.checkscheduler": CMD_SickBeardCheckScheduler, - "sb.deleterootdir":CMD_SickBeardDeleteRootDir, + "sb.deleterootdir": CMD_SickBeardDeleteRootDir, "sb.forcesearch": CMD_SickBeardForceSearch, "sb.getdefaults": CMD_SickBeardGetDefaults, "sb.getmessages": CMD_SickBeardGetMessages, From 7eab418547b4c8700d2e0eb8eaa89765a75b3c45 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 4 Jan 2012 00:45:05 -0600 Subject: [PATCH 7/9] Fix typo. Cosmetic for the help section. 
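
For reference, the renamed "optionalParameters" key is the one surfaced by the API's own help command, so the rename can be checked against a running instance. Below is a minimal Python 2 sketch only; the localhost address, default port 8081 and the placeholder API key are assumptions, not anything defined by these patches:

    import json
    import urllib2

    # Assumed local SickBeard instance and placeholder API key -- replace with real values.
    api_root = "http://localhost:8081/api/your-api-key/"

    # Request the help block for one of the commands touched here, e.g. show.pause.
    url = api_root + "?cmd=help&subject=show.pause"
    resp = json.loads(urllib2.urlopen(url).read())

    # After this patch the optional parameters should appear under the
    # "optionalParameters" key rather than the misspelled one.
    print json.dumps(resp, indent=4)
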
--- sickbeard/webapi.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 62a5ce1d51..eba2370734 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -320,7 +320,7 @@ def return_help(self): self._optionalParams = [] for paramDict, type in [(self._requiredParams, "requiredParameters"), - (self._optionalParams, "optionalPramameters")]: + (self._optionalParams, "optionalParameters")]: if type in self._help: for paramName in paramDict: @@ -655,7 +655,7 @@ class IntParseError(Exception): class CMD_Help(ApiCall): _help = {"desc": "display help information for a given subject/command", - "optionalPramameters": {"subject": {"desc": "command - the top level command"}, + "optionalParameters": {"subject": {"desc": "command - the top level command"}, } } @@ -676,7 +676,7 @@ def run(self): class CMD_ComingEpisodes(ApiCall): _help = {"desc": "display the coming episodes", - "optionalPramameters": {"sort": {"desc": "change the sort order"}, + "optionalParameters": {"sort": {"desc": "change the sort order"}, "type": {"desc": "one or more of allowedValues separated by |"} } } @@ -770,7 +770,7 @@ class CMD_Episode(ApiCall): "season": {"desc": "the season number"}, "episode": {"desc": "the episode number"} }, - "optionalPramameters": {"full_path": {"desc": "show the full absolute path (if valid) instead of a relative path for the episode location"} + "optionalParameters": {"full_path": {"desc": "show the full absolute path (if valid) instead of a relative path for the episode location"} } } @@ -941,7 +941,7 @@ def run(self): class CMD_Exceptions(ApiCall): _help = {"desc": "display scene exceptions for all or a given show", - "optionalPramameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, + "optionalParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, } } @@ -981,7 +981,7 @@ def run(self): class CMD_History(ApiCall): _help = {"desc": "display sickbeard downloaded/snatched history", - "optionalPramameters": {"limit": {"desc": "limit returned results"}, + "optionalParameters": {"limit": {"desc": "limit returned results"}, "type": {"desc": "only show a specific type of results"}, } } @@ -1074,7 +1074,7 @@ def run(self): class CMD_Logs(ApiCall): _help = {"desc": "view sickbeard's log", - "optionalPramameters": {"min_level ": {"desc": "the minimum level classification of log entries to show, with each level inherting its above level"} } + "optionalParameters": {"min_level ": {"desc": "the minimum level classification of log entries to show, with each level inherting its above level"} } } def __init__(self, args, kwargs): @@ -1151,7 +1151,7 @@ class CMD_SickBeardAddRootDir(ApiCall): _help = {"desc": "add a sickbeard user's parent directory", "requiredParameters": {"location": {"desc": "the full path to root (parent) directory"} }, - "optionalPramameters": {"default": {"desc": "make the location passed the default root (parent) directory"} + "optionalParameters": {"default": {"desc": "make the location passed the default root (parent) directory"} } } @@ -1349,7 +1349,7 @@ def run(self): class CMD_SickBeardPauseBacklog(ApiCall): _help = {"desc": "pause the backlog search", - "optionalPramameters": {"pause ": {"desc": "pause or unpause the global backlog"} } + "optionalParameters": {"pause ": {"desc": "pause or unpause the global backlog"} } } def __init__(self, args, kwargs): @@ -1404,7 +1404,7 @@ def run(self): class CMD_SickBeardSearchTVDB(ApiCall): _help = {"desc": "search 
for show at tvdb with a given string and language", - "optionalPramameters": {"name": {"desc": "name of the show you want to search for"}, + "optionalParameters": {"name": {"desc": "name of the show you want to search for"}, "tvdbid": {"desc": "thetvdb.com unique id of a show"}, "lang": {"desc": "the 2 letter abbreviation lang id"} } @@ -1476,7 +1476,7 @@ def run(self): class CMD_SickBeardSetDefaults(ApiCall): _help = {"desc": "set sickbeard user defaults", - "optionalPramameters": {"initial": {"desc": "initial quality for the show"}, + "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "archive": {"desc": "archive quality for the show"}, "season_folder": {"desc": "use season subfolders within the show directory"}, "status": {"desc": "status of missing episodes"} @@ -1621,7 +1621,7 @@ class CMD_ShowAddExisting(ApiCall): "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, "location": {"desc": "full path to the existing folder for the show"} }, - "optionalPramameters": {"initial": {"desc": "initial quality for the show"}, + "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "archive": {"desc": "archive quality for the show"}, "season_folder": {"desc": "use season subfolders for the show"} } @@ -1689,7 +1689,7 @@ class CMD_ShowAddNew(ApiCall): _help = {"desc": "add a new show to sickbeard", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"} }, - "optionalPramameters": {"initial": {"desc": "initial quality for the show"}, + "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "location": {"desc": "base path for where the show folder is to be created"}, "archive": {"desc": "archive quality for the show"}, "season_folder": {"desc": "use season subfolders for the show"}, @@ -1923,7 +1923,7 @@ class CMD_ShowPause(ApiCall): _help = {"desc": "set a show's paused state in sickbeard", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, }, - "optionalPramameters": {"pause": {"desc": "set the pause state of the show"} + "optionalParameters": {"pause": {"desc": "set the pause state of the show"} } } @@ -1982,7 +1982,7 @@ class CMD_ShowSeasonList(ApiCall): _help = {"desc": "display the season list for a given show", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, }, - "optionalPramameters": {"sort": {"desc": "change the sort order from descending to ascending"} + "optionalParameters": {"sort": {"desc": "change the sort order from descending to ascending"} } } @@ -2017,7 +2017,7 @@ class CMD_ShowSeasons(ApiCall): _help = {"desc": "display a listing of episodes for all or a given season", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"}, }, - "optionalPramameters": {"season": {"desc": "the season number"}, + "optionalParameters": {"season": {"desc": "the season number"}, } } @@ -2077,7 +2077,7 @@ class CMD_ShowSetQuality(ApiCall): _help = {"desc": "set desired quality of a show in sickbeard. 
if neither initial or archive are provided then the config default quality will be used", "requiredParameters": {"tvdbid": {"desc": "thetvdb.com unique id of a show"} }, - "optionalPramameters": {"initial": {"desc": "initial quality for the show"}, + "optionalParameters": {"initial": {"desc": "initial quality for the show"}, "archive": {"desc": "archive quality for the show"} } } @@ -2283,7 +2283,7 @@ def run(self): class CMD_Shows(ApiCall): _help = {"desc": "display all shows in sickbeard", - "optionalPramameters": {"sort": {"desc": "sort the list of shows by show name instead of tvdbid"}, + "optionalParameters": {"sort": {"desc": "sort the list of shows by show name instead of tvdbid"}, "paused": {"desc": "only show the shows that are set to paused"}, }, } From 1b40e8fefc300f0c58b3262e35d572e0516c9250 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Wed, 4 Jan 2012 00:01:58 -0700 Subject: [PATCH 8/9] Added support to show or not show paused eps in the coming eps section of the API --- data/interfaces/default/apiBuilder.tmpl | 17 +++++++++++------ sickbeard/webapi.py | 14 ++++++++++---- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/data/interfaces/default/apiBuilder.tmpl b/data/interfaces/default/apiBuilder.tmpl index 1fc27db458..92d86c1560 100644 --- a/data/interfaces/default/apiBuilder.tmpl +++ b/data/interfaces/default/apiBuilder.tmpl @@ -268,12 +268,17 @@ addList("future", "Sort by Network", "&sort=network", "future-type"); addList("future", "Sort by Show Name", "&sort=show", "future-type"); addOption("future-type", "Optional Param", "", 1); -addOption("future-type", "Show All Types", "&type=today|missed|soon|later"); -addOption("future-type", "Show Today", "&type=today"); -addOption("future-type", "Show Missed", "&type=missed"); -addOption("future-type", "Show Soon", "&type=soon"); -addOption("future-type", "Show Later", "&type=later"); -addOption("future-type", "Show Today & Missed", "&type=today|missed"); +addList("future-type", "Show All Types", "&type=today|missed|soon|later", "future-paused"); +addList("future-type", "Show Today", "&type=today", "future-paused"); +addList("future-type", "Show Missed", "&type=missed", "future-paused"); +addList("future-type", "Show Soon", "&type=soon", "future-paused"); +addList("future-type", "Show Later", "&type=later", "future-paused"); +addList("future-type", "Show Today & Missed", "&type=today|missed", "future-paused"); + +addOption("future-paused", "Optional Param", "", 1); +addOption("future-paused", "Use default paused display setting", ""); +addOption("future-paused", "Show all paused shows", "&paused=1"); +addOption("future-paused", "Show no paused shows", "&paused=0"); addOption("history", "Optional Param", "", 1); addList("history", "Show Only Downloaded", "&type=downloaded", "history-type"); diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 60bc43ade4..1e816a270c 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -675,7 +675,8 @@ def run(self): class CMD_ComingEpisodes(ApiCall): _help = {"desc": "display the coming episodes", "optionalPramameters": {"sort": {"desc": "change the sort order"}, - "type": {"desc": "one or more of allowedValues separated by |"} + "type": {"desc": "one or more of allowedValues separated by |"}, + "paused": {"desc": "0 to omit paused shows, 1 to show them, or omitted to use the SB default"}, } } @@ -684,6 +685,7 @@ def __init__(self, args, kwargs): # optional self.sort, args = self.check_params(args, kwargs, "sort", "date", False, "string", ["date", "show", "network"]) 
self.type, args = self.check_params(args, kwargs, "type", "today|missed|soon|later", False, "list", ["missed", "later", "today", "soon"]) + self.paused, args = self.check_params(args, kwargs, "paused", sickbeard.COMING_EPS_DISPLAY_PAUSED, False, "int", [0, 1]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -697,14 +699,14 @@ def run(self): qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] myDB = db.DBConnection(row_type="dict") - sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) + sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["tvdbid"])) - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.tvdb_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) + more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.tvdb_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) sql_results += more_sql_results - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? 
AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) sql_results += more_sql_results # sort by air date @@ -728,6 +730,10 @@ def run(self): Soon: tomorrow till next week Later: later than next week """ + + if ep["paused"] and not self.paused: + continue + status = "soon" if ep["airdate"] < today: status = "missed" From 7ef8772a013580274993006f0d12526444203000 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 4 Jan 2012 01:42:09 -0600 Subject: [PATCH 9/9] Fixed the coming ep paused option, slight modification to sql query as well. --- sickbeard/webapi.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 2dc784844d..f72b18e49c 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -677,9 +677,9 @@ def run(self): class CMD_ComingEpisodes(ApiCall): _help = {"desc": "display the coming episodes", "optionalParameters": {"sort": {"desc": "change the sort order"}, - "type": {"desc": "one or more of allowedValues separated by |"}, - "paused": {"desc": "0 to omit paused shows, 1 to show them, or omitted to use the SB default"}, - } + "type": {"desc": "one or more of allowedValues separated by |"}, + "paused": {"desc": "0 to exclude paused shows, 1 to include them, or omitted to use the SB default"}, + } } def __init__(self, args, kwargs): @@ -687,7 +687,7 @@ def __init__(self, args, kwargs): # optional self.sort, args = self.check_params(args, kwargs, "sort", "date", False, "string", ["date", "show", "network"]) self.type, args = self.check_params(args, kwargs, "type", "today|missed|soon|later", False, "list", ["missed", "later", "today", "soon"]) - self.paused, args = self.check_params(args, kwargs, "pause", int(sickbeard.COMING_EPS_DISPLAY_PAUSED), False, "bool", []) + self.paused, args = self.check_params(args, kwargs, "paused", sickbeard.COMING_EPS_DISPLAY_PAUSED, False, "int", [0, 1]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -701,14 +701,14 @@ def run(self): qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] myDB = db.DBConnection(row_type="dict") - sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) + sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? 
AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["tvdbid"])) - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.tvdb_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) + more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(['?'] * len(done_show_list)) + ") AND tv_shows.tvdb_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) sql_results += more_sql_results - more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status as show_status, tv_shows.paused as paused FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + more_sql_results = myDB.select("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) sql_results += more_sql_results # sort by air date