diff --git a/psycopg2/__init__.py b/psycopg2/__init__.py new file mode 100644 index 0000000..cf8c06a --- /dev/null +++ b/psycopg2/__init__.py @@ -0,0 +1,168 @@ +"""A Python driver for PostgreSQL + +psycopg is a PostgreSQL_ database adapter for the Python_ programming +language. This is version 2, a complete rewrite of the original code to +provide new-style classes for connection and cursor objects and other sweet +candies. Like the original, psycopg 2 was written with the aim of being very +small and fast, and stable as a rock. + +Homepage: http://initd.org/projects/psycopg2 + +.. _PostgreSQL: http://www.postgresql.org/ +.. _Python: http://www.python.org/ + +:Groups: + * `Connections creation`: connect + * `Value objects constructors`: Binary, Date, DateFromTicks, Time, + TimeFromTicks, Timestamp, TimestampFromTicks +""" +# psycopg/__init__.py - initialization of the psycopg module +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# Import modules needed by _psycopg to allow tools like py2exe to do +# their work without bothering about the module dependencies. 
# Note: the first internal import should be _psycopg, otherwise the real cause
# of a failed loading of the C module may get hidden, see
# http://archives.postgresql.org/psycopg/2011-02/msg00044.php

# Import the DBAPI-2.0 stuff into top-level module.

from psycopg2._psycopg import BINARY, NUMBER, STRING, DATETIME, ROWID

from psycopg2._psycopg import Binary, Date, Time, Timestamp
from psycopg2._psycopg import DateFromTicks, TimeFromTicks, TimestampFromTicks

from psycopg2._psycopg import Error, Warning, DataError, DatabaseError, ProgrammingError
from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
from psycopg2._psycopg import NotSupportedError, OperationalError

from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
from psycopg2._psycopg import __version__

from psycopg2 import tz


# Register default adapters.

import psycopg2.extensions as _ext
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)

# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
try:
    from decimal import Decimal
except ImportError:
    pass
else:
    from psycopg2._psycopg import Decimal as Adapter
    _ext.register_adapter(Decimal, Adapter)
    del Decimal, Adapter

import re


def _param_escape(s,
        re_escape=re.compile(r"([\\'])"),
        re_space=re.compile(r'\s')):
    """
    Apply the escaping rule required by PQconnectdb

    Backslashes and single quotes are backslash-escaped; values containing
    whitespace are wrapped in single quotes so the libpq dsn parser keeps
    them as one token.  An empty value is rendered as ``''``.
    """
    if not s: return "''"

    s = re_escape.sub(r'\\\1', s)
    if re_space.search(s):
        s = "'" + s + "'"

    return s

# keep the module namespace clean: re is only needed to build the
# default-argument patterns above
del re


def connect(dsn=None,
        database=None, user=None, password=None, host=None, port=None,
        connection_factory=None, cursor_factory=None, async=False, **kwargs):
    """
    Create a new database connection.

    The connection parameters can be specified either as a string:

        conn = psycopg2.connect("dbname=test user=postgres password=secret")

    or using a set of keyword arguments:

        conn = psycopg2.connect(database="test", user="postgres", password="secret")

    The basic connection parameters are:

    - *dbname*: the database name (only in dsn string)
    - *database*: the database name (only as keyword argument)
    - *user*: user name used to authenticate
    - *password*: password used to authenticate
    - *host*: database host address (defaults to UNIX socket if not provided)
    - *port*: connection port number (defaults to 5432 if not provided)

    Using the *connection_factory* parameter a different class or connections
    factory can be specified. It should be a callable object taking a dsn
    argument.

    Using the *cursor_factory* parameter, a new default cursor factory will be
    used by cursor().

    Using *async*=True an asynchronous connection will be created.

    Any other keyword parameter will be passed to the underlying client
    library: the list of supported parameters depends on the library version.

    """
    # Build the dsn from the keyword arguments; passing both a dsn string
    # and keyword parameters is an error (which one would win is ambiguous).
    items = []
    if database is not None:
        items.append(('dbname', database))
    if user is not None:
        items.append(('user', user))
    if password is not None:
        items.append(('password', password))
    if host is not None:
        items.append(('host', host))
    if port is not None:
        items.append(('port', port))

    items.extend([(k, v) for (k, v) in kwargs.iteritems() if v is not None])

    if dsn is not None and items:
        raise TypeError(
            "'%s' is an invalid keyword argument when the dsn is specified"
            % items[0][0])

    if dsn is None:
        if not items:
            raise TypeError('missing dsn and no parameters')
        else:
            dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
                for (k, v) in items])

    conn = _connect(dsn, connection_factory=connection_factory, async=async)
    if cursor_factory is not None:
        conn.cursor_factory = cursor_factory

    return conn


# ==== psycopg2/_json.py ====

"""Implementation of the JSON adaptation objects

This module exists to avoid a circular import problem: psycopg2.extras depends
on psycopg2.extensions, so I can't create the default JSON typecasters in
extensions importing register_json from extras.
"""

# psycopg/_json.py - Implementation of the JSON adaptation objects
#
# Copyright (C) 2012 Daniele Varrazzo
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

import sys

from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type


# import the best json implementation available: stdlib json on >= 2.6,
# simplejson on older Pythons, else None (handled lazily at use time)
if sys.version_info[:2] >= (2,6):
    import json
else:
    try:
        import simplejson as json
    except ImportError:
        json = None


# oids from PostgreSQL 9.2
JSON_OID = 114
JSONARRAY_OID = 199

class Json(object):
    """
    An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
    :sql:`json` data type.

    `!Json` can be used to wrap any object supported by the provided *dumps*
    function. If none is provided, the standard :py:func:`json.dumps()` is
    used (`!simplejson` for Python < 2.6;
    `~psycopg2.extensions.ISQLQuote.getquoted()` will raise `!ImportError` if
    the module is not available).

    """
    def __init__(self, adapted, dumps=None):
        self.adapted = adapted

        # resolve the serializer once: explicit dumps wins, then the json
        # module found at import time; None defers the failure to dumps()
        if dumps is not None:
            self._dumps = dumps
        elif json is not None:
            self._dumps = json.dumps
        else:
            self._dumps = None

    def __conform__(self, proto):
        # DB-API adaptation protocol: we quote ourselves
        if proto is ISQLQuote:
            return self

    def dumps(self, obj):
        """Serialize *obj* in JSON format.

        The default is to call `!json.dumps()` or the *dumps* function
        provided in the constructor. You can override this method to create a
        customized JSON wrapper.
        """
        dumps = self._dumps
        if dumps is not None:
            return dumps(obj)
        else:
            raise ImportError(
                "json module not available: "
                "you should provide a dumps function")

    def getquoted(self):
        # serialize then quote as a plain SQL string literal
        s = self.dumps(self.adapted)
        return QuotedString(s).getquoted()


def register_json(conn_or_curs=None, globally=False, loads=None,
        oid=None, array_oid=None):
    """Create and register typecasters converting :sql:`json` type to Python objects.

    :param conn_or_curs: a connection or cursor used to find the :sql:`json`
        and :sql:`json[]` oids; the typecasters are registered in a scope
        limited to this object, unless *globally* is set to `!True`. It can be
        `!None` if the oids are provided
    :param globally: if `!False` register the typecasters only on
        *conn_or_curs*, otherwise register them globally
    :param loads: the function used to parse the data into a Python object. If
        `!None` use `!json.loads()`, where `!json` is the module chosen
        according to the Python version (see above)
    :param oid: the OID of the :sql:`json` type if known; If not, it will be
        queried on *conn_or_curs*
    :param array_oid: the OID of the :sql:`json[]` array type if known;
        if not, it will be queried on *conn_or_curs*

    The connection or cursor passed to the function will be used to query the
    database and look for the OID of the :sql:`json` type. No query is
    performed if *oid* and *array_oid* are provided. Raise
    `~psycopg2.ProgrammingError` if the type is not found.

    """
    if oid is None:
        oid, array_oid = _get_json_oids(conn_or_curs)

    JSON, JSONARRAY = _create_json_typecasters(oid, array_oid, loads)

    register_type(JSON, not globally and conn_or_curs or None)

    if JSONARRAY is not None:
        register_type(JSONARRAY, not globally and conn_or_curs or None)

    return JSON, JSONARRAY

def register_default_json(conn_or_curs=None, globally=False, loads=None):
    """
    Create and register :sql:`json` typecasters for PostgreSQL 9.2 and following.

    Since PostgreSQL 9.2 :sql:`json` is a builtin type, hence its oid is known
    and fixed. This function allows specifying a customized *loads* function
    for the default :sql:`json` type without querying the database.
    All the parameters have the same meaning as in `register_json()`.
    """
    return register_json(conn_or_curs=conn_or_curs, globally=globally,
        loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)

def _create_json_typecasters(oid, array_oid, loads=None):
    """Create typecasters for json data type."""
    if loads is None:
        if json is None:
            raise ImportError("no json module available")
        else:
            loads = json.loads

    def typecast_json(s, cur):
        if s is None:
            return None
        return loads(s)

    JSON = new_type((oid, ), 'JSON', typecast_json)
    if array_oid is not None:
        JSONARRAY = new_array_type((array_oid, ), "JSONARRAY", JSON)
    else:
        JSONARRAY = None

    return JSON, JSONARRAY

def _get_json_oids(conn_or_curs):
    """Query the database for the oids of the json and json[] types."""
    # lazy imports
    from psycopg2.extensions import STATUS_IN_TRANSACTION
    from psycopg2.extras import _solve_conn_curs

    conn, curs = _solve_conn_curs(conn_or_curs)

    # Store the transaction status of the connection to revert it after use
    conn_status = conn.status

    # column typarray not available before PG 8.3
    typarray = conn.server_version >= 80300 and "typarray" or "NULL"

    # get the oid for the json type
    curs.execute(
        "SELECT t.oid, %s FROM pg_type t WHERE t.typname = 'json';"
        % typarray)
    r = curs.fetchone()

    # revert the status of the connection as before the command
    if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit):
        conn.rollback()

    if not r:
        raise conn.ProgrammingError("json data type not found")

    return r


# ==== psycopg2/_psycopg.pyd: compiled C extension (binary, not shown) ====

# ==== psycopg2/_range.py ====
"""Implementation of the Range type and adaptation

"""

# psycopg/_range.py - Implementation of the Range type and adaptation
#
# Copyright (C) 2012 Daniele Varrazzo
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

import re

from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter, b
from psycopg2.extensions import new_type, new_array_type, register_type


class Range(object):
    """Python representation for a PostgreSQL |range|_ type.

    :param lower: lower bound for the range. `!None` means unbound
    :param upper: upper bound for the range. `!None` means unbound
    :param bounds: one of the literal strings ``()``, ``[)``, ``(]``, ``[]``,
        representing whether the lower or upper bounds are included
    :param empty: if `!True`, the range is empty

    """
    __slots__ = ('_lower', '_upper', '_bounds')

    def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
        if not empty:
            if bounds not in ('[)', '(]', '()', '[]'):
                raise ValueError("bound flags not valid: %r" % bounds)

            self._lower = lower
            self._upper = upper
            self._bounds = bounds
        else:
            # the empty range is represented by all three attributes None
            self._lower = self._upper = self._bounds = None

    def __repr__(self):
        if self._bounds is None:
            return "%s(empty=True)" % self.__class__.__name__
        else:
            return "%s(%r, %r, %r)" % (self.__class__.__name__,
                self._lower, self._upper, self._bounds)

    @property
    def lower(self):
        """The lower bound of the range. `!None` if empty or unbound."""
        return self._lower

    @property
    def upper(self):
        """The upper bound of the range. `!None` if empty or unbound."""
        return self._upper

    @property
    def isempty(self):
        """`!True` if the range is empty."""
        return self._bounds is None

    @property
    def lower_inf(self):
        """`!True` if the range doesn't have a lower bound."""
        if self._bounds is None: return False
        return self._lower is None

    @property
    def upper_inf(self):
        """`!True` if the range doesn't have an upper bound."""
        if self._bounds is None: return False
        return self._upper is None

    @property
    def lower_inc(self):
        """`!True` if the lower bound is included in the range."""
        if self._bounds is None: return False
        if self._lower is None: return False
        return self._bounds[0] == '['

    @property
    def upper_inc(self):
        """`!True` if the upper bound is included in the range."""
        if self._bounds is None: return False
        if self._upper is None: return False
        return self._bounds[1] == ']'

    def __contains__(self, x):
        if self._bounds is None: return False
        if self._lower is not None:
            if self._bounds[0] == '[':
                if x < self._lower: return False
            else:
                if x <= self._lower: return False

        if self._upper is not None:
            if self._bounds[1] == ']':
                if x > self._upper: return False
            else:
                if x >= self._upper: return False

        return True

    def __nonzero__(self):
        # the empty range is falsy (Python 2 truth protocol)
        return self._bounds is not None

    def __eq__(self, other):
        if not isinstance(other, Range):
            return False
        return (self._lower == other._lower
            and self._upper == other._upper
            and self._bounds == other._bounds)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash((self._lower, self._upper, self._bounds))

    def __lt__(self, other):
        raise TypeError(
            'Range objects cannot be ordered; please refer to the PostgreSQL'
            ' documentation to perform this operation in the database')

    __le__ = __gt__ = __ge__ = __lt__


def register_range(pgrange, pyrange, conn_or_curs, globally=False):
    """Create and register an adapter and the typecasters to convert between
    a PostgreSQL |range|_ type and a PostgreSQL `Range` subclass.

    :param pgrange: the name of the PostgreSQL |range| type. Can be
        schema-qualified
    :param pyrange: a `Range` strict subclass, or just a name to give to a new
        class
    :param conn_or_curs: a connection or cursor used to find the oid of the
        range and its subtype; the typecaster is registered in a scope limited
        to this object, unless *globally* is set to `!True`
    :param globally: if `!False` (default) register the typecaster only on
        *conn_or_curs*, otherwise register it globally
    :return: `RangeCaster` instance responsible for the conversion

    If a string is passed to *pyrange*, a new `Range` subclass is created
    with such name and will be available as the `~RangeCaster.range` attribute
    of the returned `RangeCaster` object.

    The function queries the database on *conn_or_curs* to inspect the
    *pgrange* type and raises `~psycopg2.ProgrammingError` if the type is not
    found.  If querying the database is not advisable, use directly the
    `RangeCaster` class and register the adapter and typecasters using the
    provided functions.

    """
    caster = RangeCaster._from_db(pgrange, pyrange, conn_or_curs)
    caster._register(not globally and conn_or_curs or None)
    return caster


class RangeAdapter(object):
    """`ISQLQuote` adapter for `Range` subclasses.

    This is an abstract class: concrete classes must set a `name` class
    attribute or override `getquoted()`.
    """
    name = None

    def __init__(self, adapted):
        self.adapted = adapted

    def __conform__(self, proto):
        # Fixed: the original tested `self._proto`, an attribute that is
        # never assigned anywhere in the class, so conforming would raise
        # AttributeError.  The protocol argument is what must be checked
        # (compare Json.__conform__ in _json.py).
        if proto is ISQLQuote:
            return self

    def prepare(self, conn):
        # remember the connection so bound adapters can be prepared too
        self._conn = conn

    def getquoted(self):
        if self.name is None:
            raise NotImplementedError(
                'RangeAdapter must be subclassed overriding its name '
                'or the getquoted() method')

        r = self.adapted
        if r.isempty:
            return b("'empty'::" + self.name)

        if r.lower is not None:
            a = adapt(r.lower)
            if hasattr(a, 'prepare'):
                a.prepare(self._conn)
            lower = a.getquoted()
        else:
            lower = b('NULL')

        if r.upper is not None:
            a = adapt(r.upper)
            if hasattr(a, 'prepare'):
                a.prepare(self._conn)
            upper = a.getquoted()
        else:
            upper = b('NULL')

        # render as a range constructor call, e.g. daterange(lo, hi, '[)')
        return b(self.name + '(') + lower + b(', ') + upper \
            + b(", '%s')" % r._bounds)


class RangeCaster(object):
    """Helper class to convert between `Range` and PostgreSQL range types.

    Objects of this class are usually created by `register_range()`. Manual
    creation could be useful if querying the database is not advisable: in
    this case the oids must be provided.
    """
    def __init__(self, pgrange, pyrange, oid, subtype_oid, array_oid=None):
        self.subtype_oid = subtype_oid
        self._create_ranges(pgrange, pyrange)

        name = self.adapter.name or self.adapter.__class__.__name__

        self.typecaster = new_type((oid,), name, self.parse)

        if array_oid is not None:
            self.array_typecaster = new_array_type(
                (array_oid,), name + "ARRAY", self.typecaster)
        else:
            self.array_typecaster = None

    def _create_ranges(self, pgrange, pyrange):
        """Create Range and RangeAdapter classes if needed."""
        # if got a string create a new RangeAdapter concrete type (with a name)
        # else take it as an adapter. Passing an adapter should be considered
        # an implementation detail and is not documented. It is currently used
        # for the numeric ranges.
        self.adapter = None
        if isinstance(pgrange, basestring):
            self.adapter = type(pgrange, (RangeAdapter,), {})
            self.adapter.name = pgrange
        else:
            try:
                if issubclass(pgrange, RangeAdapter) and pgrange is not RangeAdapter:
                    self.adapter = pgrange
            except TypeError:
                pass

        if self.adapter is None:
            raise TypeError(
                'pgrange must be a string or a RangeAdapter strict subclass')

        self.range = None
        try:
            if isinstance(pyrange, basestring):
                self.range = type(pyrange, (Range,), {})
            if issubclass(pyrange, Range) and pyrange is not Range:
                self.range = pyrange
        except TypeError:
            # issubclass() raised because pyrange is a string: self.range
            # was already created above, so this is not an error
            pass

        if self.range is None:
            raise TypeError(
                'pyrange must be a type or a Range strict subclass')

    @classmethod
    def _from_db(cls, name, pyrange, conn_or_curs):
        """Return a `RangeCaster` instance for the type *pgrange*.

        Raise `ProgrammingError` if the type is not found.
        """
        from psycopg2.extensions import STATUS_IN_TRANSACTION
        from psycopg2.extras import _solve_conn_curs
        conn, curs = _solve_conn_curs(conn_or_curs)

        if conn.server_version < 90200:
            raise ProgrammingError("range types not available in version %s"
                % conn.server_version)

        # Store the transaction status of the connection to revert it after use
        conn_status = conn.status

        # Use the correct schema
        if '.' in name:
            schema, tname = name.split('.', 1)
        else:
            tname = name
            schema = 'public'

        # get the type oid and attributes
        try:
            curs.execute("""\
select rngtypid, rngsubtype,
    (select typarray from pg_type where oid = rngtypid)
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
where typname = %s and ns.nspname = %s;
""", (tname, schema))

        except ProgrammingError:
            if not conn.autocommit:
                conn.rollback()
            raise
        else:
            rec = curs.fetchone()

            # revert the status of the connection as before the command
            if (conn_status != STATUS_IN_TRANSACTION
            and not conn.autocommit):
                conn.rollback()

        if not rec:
            raise ProgrammingError(
                "PostgreSQL type '%s' not found" % name)

        type, subtype, array = rec

        return RangeCaster(name, pyrange,
            oid=type, subtype_oid=subtype, array_oid=array)

    _re_range = re.compile(r"""
        ( \(|\[ )                   # lower bound flag
        (?:                         # lower bound:
          " ( (?: [^"] | "")* ) "   # - a quoted string
        | ( [^",]+ )                # - or an unquoted string
        )?                          # - or empty (not captured)
        ,
        (?:                         # upper bound:
          " ( (?: [^"] | "")* ) "   # - a quoted string
        | ( [^"\)\]]+ )             # - or an unquoted string
        )?                          # - or empty (not captured)
        ( \)|\] )                   # upper bound flag
        """, re.VERBOSE)

    _re_undouble = re.compile(r'(["\\])\1')

    def parse(self, s, cur=None):
        """Convert the literal *s* into an instance of `!self.range`."""
        if s is None:
            return None

        if s == 'empty':
            return self.range(empty=True)

        m = self._re_range.match(s)
        if m is None:
            # Fixed: the original message lacked the % interpolation, so the
            # offending string never appeared in the error.
            raise InterfaceError("failed to parse range: '%s'" % s)

        lower = m.group(3)
        if lower is None:
            lower = m.group(2)
            if lower is not None:
                # undo the doubling of quotes/backslashes inside quoted bounds
                lower = self._re_undouble.sub(r"\1", lower)

        upper = m.group(5)
        if upper is None:
            upper = m.group(4)
            if upper is not None:
                upper = self._re_undouble.sub(r"\1", upper)

        if cur is not None:
            # cast the bounds using the subtype's own typecaster
            lower = cur.cast(self.subtype_oid, lower)
            upper = cur.cast(self.subtype_oid, upper)

        bounds = m.group(1) + m.group(6)

        return self.range(lower, upper, bounds)

    def _register(self, scope=None):
        register_type(self.typecaster, scope)
        if self.array_typecaster is not None:
            register_type(self.array_typecaster, scope)

        register_adapter(self.range, self.adapter)


class NumericRange(Range):
    """A `Range` suitable to pass Python numeric types to a PostgreSQL range.

    PostgreSQL types :sql:`int4range`, :sql:`int8range`, :sql:`numrange` are
    casted into `!NumericRange` instances.
    """
    pass

class DateRange(Range):
    """Represents :sql:`daterange` values."""
    pass

class DateTimeRange(Range):
    """Represents :sql:`tsrange` values."""
    pass

class DateTimeTZRange(Range):
    """Represents :sql:`tstzrange` values."""
    pass


# Special adaptation for NumericRange. Allows to pass number range regardless
# of whether they are ints, floats and what size of ints are, which are
# pointless in Python world.
# On the way back, no numeric range is casted to
# NumericRange, but only to their subclasses

class NumberRangeAdapter(RangeAdapter):
    """Adapt a range if the subtype doesn't need quotes."""
    def getquoted(self):
        r = self.adapted
        if r.isempty:
            return b("'empty'")

        if not r.lower_inf:
            # not exactly: we are relying that none of these object is really
            # quoted (they are numbers). Also, I'm lazy and not preparing the
            # adapter because I assume encoding doesn't matter for these
            # objects.
            lower = adapt(r.lower).getquoted().decode('ascii')
        else:
            lower = ''

        if not r.upper_inf:
            upper = adapt(r.upper).getquoted().decode('ascii')
        else:
            upper = ''

        # render as a range literal, e.g. '[0,10)'; an unbound side is left
        # empty per the range input syntax
        return ("'%s%s,%s%s'" % (
            r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')

# TODO: probably won't work with infs, nans and other tricky cases.
register_adapter(NumericRange, NumberRangeAdapter)


# Register globally typecasters and adapters for builtin range types.

# note: the adapter is registered more than once, but this is harmless.

# oids below are the fixed, builtin values of the PostgreSQL 9.2 catalog
int4range_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3904, subtype_oid=23, array_oid=3905)
int4range_caster._register()

int8range_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3926, subtype_oid=20, array_oid=3927)
int8range_caster._register()

numrange_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3906, subtype_oid=1700, array_oid=3907)
numrange_caster._register()

daterange_caster = RangeCaster('daterange', DateRange,
    oid=3912, subtype_oid=1082, array_oid=3913)
daterange_caster._register()

tsrange_caster = RangeCaster('tsrange', DateTimeRange,
    oid=3908, subtype_oid=1114, array_oid=3909)
tsrange_caster._register()

tstzrange_caster = RangeCaster('tstzrange', DateTimeTZRange,
    oid=3910, subtype_oid=1184, array_oid=3911)
tstzrange_caster._register()


# ==== psycopg2/errorcodes.py ====

"""Error codes for PostgreSQL

This module contains symbolic names for all PostgreSQL error codes.
"""
# psycopg2/errorcodes.py - PostgreSQL error codes
#
# Copyright (C) 2006-2010 Johan Dahlin
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# Based on:
#
# http://www.postgresql.org/docs/current/static/errcodes-appendix.html
#

def lookup(code, _cache={}):
    """Lookup an error code or class code and return its symbolic name.

    Raise `KeyError` if the code is not found.
    """
    if _cache:
        return _cache[code]

    # Generate the lookup map at first usage: every module-level string of
    # length 2 (class code) or 5 (full SQLSTATE) is an error code constant.
    #
    # Fixed: the original populated the shared _cache dict in place and then
    # recursed, so a concurrent caller could take the fast path above on a
    # partially-built map.  Build the map in a local dict and install it
    # with a single atomic update() instead.  (items() replaces iteritems()
    # and behaves the same here.)
    tmp = {}
    for k, v in globals().items():
        if isinstance(v, str) and len(v) in (2, 5):
            tmp[v] = k

    _cache.update(tmp)
    return _cache[code]


# autogenerated data: do not edit below this point.

# Error classes
CLASS_SUCCESSFUL_COMPLETION = '00'
CLASS_WARNING = '01'
CLASS_NO_DATA = '02'
CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = '03'
CLASS_CONNECTION_EXCEPTION = '08'
CLASS_TRIGGERED_ACTION_EXCEPTION = '09'
CLASS_FEATURE_NOT_SUPPORTED = '0A'
CLASS_INVALID_TRANSACTION_INITIATION = '0B'
CLASS_LOCATOR_EXCEPTION = '0F'
CLASS_INVALID_GRANTOR = '0L'
CLASS_INVALID_ROLE_SPECIFICATION = '0P'
CLASS_DIAGNOSTICS_EXCEPTION = '0Z'
CLASS_CASE_NOT_FOUND = '20'
CLASS_CARDINALITY_VIOLATION = '21'
CLASS_DATA_EXCEPTION = '22'
CLASS_INTEGRITY_CONSTRAINT_VIOLATION = '23'
CLASS_INVALID_CURSOR_STATE = '24'
CLASS_INVALID_TRANSACTION_STATE = '25'
CLASS_INVALID_SQL_STATEMENT_NAME = '26'
CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = '27'
CLASS_INVALID_AUTHORIZATION_SPECIFICATION = '28'
CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B'
CLASS_INVALID_TRANSACTION_TERMINATION = '2D'
CLASS_SQL_ROUTINE_EXCEPTION = '2F'
CLASS_INVALID_CURSOR_NAME = '34'
CLASS_EXTERNAL_ROUTINE_EXCEPTION = '38'
CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39'
CLASS_SAVEPOINT_EXCEPTION = '3B'
CLASS_INVALID_CATALOG_NAME = '3D'
CLASS_INVALID_SCHEMA_NAME = '3F'
CLASS_TRANSACTION_ROLLBACK = '40'
+CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42' +CLASS_WITH_CHECK_OPTION_VIOLATION = '44' +CLASS_INSUFFICIENT_RESOURCES = '53' +CLASS_PROGRAM_LIMIT_EXCEEDED = '54' +CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55' +CLASS_OPERATOR_INTERVENTION = '57' +CLASS_SYSTEM_ERROR = '58' +CLASS_CONFIGURATION_FILE_ERROR = 'F0' +CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV' +CLASS_PL_PGSQL_ERROR = 'P0' +CLASS_INTERNAL_ERROR = 'XX' + +# Class 00 - Successful Completion +SUCCESSFUL_COMPLETION = '00000' + +# Class 01 - Warning +WARNING = '01000' +NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003' +STRING_DATA_RIGHT_TRUNCATION = '01004' +PRIVILEGE_NOT_REVOKED = '01006' +PRIVILEGE_NOT_GRANTED = '01007' +IMPLICIT_ZERO_BIT_PADDING = '01008' +DYNAMIC_RESULT_SETS_RETURNED = '0100C' +DEPRECATED_FEATURE = '01P01' + +# Class 02 - No Data (this is also a warning class per the SQL standard) +NO_DATA = '02000' +NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = '02001' + +# Class 03 - SQL Statement Not Yet Complete +SQL_STATEMENT_NOT_YET_COMPLETE = '03000' + +# Class 08 - Connection Exception +CONNECTION_EXCEPTION = '08000' +SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = '08001' +CONNECTION_DOES_NOT_EXIST = '08003' +SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = '08004' +CONNECTION_FAILURE = '08006' +TRANSACTION_RESOLUTION_UNKNOWN = '08007' +PROTOCOL_VIOLATION = '08P01' + +# Class 09 - Triggered Action Exception +TRIGGERED_ACTION_EXCEPTION = '09000' + +# Class 0A - Feature Not Supported +FEATURE_NOT_SUPPORTED = '0A000' + +# Class 0B - Invalid Transaction Initiation +INVALID_TRANSACTION_INITIATION = '0B000' + +# Class 0F - Locator Exception +LOCATOR_EXCEPTION = '0F000' +INVALID_LOCATOR_SPECIFICATION = '0F001' + +# Class 0L - Invalid Grantor +INVALID_GRANTOR = '0L000' +INVALID_GRANT_OPERATION = '0LP01' + +# Class 0P - Invalid Role Specification +INVALID_ROLE_SPECIFICATION = '0P000' + +# Class 0Z - Diagnostics Exception +DIAGNOSTICS_EXCEPTION = '0Z000' +STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = 
'0Z002' + +# Class 20 - Case Not Found +CASE_NOT_FOUND = '20000' + +# Class 21 - Cardinality Violation +CARDINALITY_VIOLATION = '21000' + +# Class 22 - Data Exception +DATA_EXCEPTION = '22000' +STRING_DATA_RIGHT_TRUNCATION = '22001' +NULL_VALUE_NO_INDICATOR_PARAMETER = '22002' +NUMERIC_VALUE_OUT_OF_RANGE = '22003' +NULL_VALUE_NOT_ALLOWED = '22004' +ERROR_IN_ASSIGNMENT = '22005' +INVALID_DATETIME_FORMAT = '22007' +DATETIME_FIELD_OVERFLOW = '22008' +INVALID_TIME_ZONE_DISPLACEMENT_VALUE = '22009' +ESCAPE_CHARACTER_CONFLICT = '2200B' +INVALID_USE_OF_ESCAPE_CHARACTER = '2200C' +INVALID_ESCAPE_OCTET = '2200D' +ZERO_LENGTH_CHARACTER_STRING = '2200F' +MOST_SPECIFIC_TYPE_MISMATCH = '2200G' +NOT_AN_XML_DOCUMENT = '2200L' +INVALID_XML_DOCUMENT = '2200M' +INVALID_XML_CONTENT = '2200N' +INVALID_XML_COMMENT = '2200S' +INVALID_XML_PROCESSING_INSTRUCTION = '2200T' +INVALID_INDICATOR_PARAMETER_VALUE = '22010' +SUBSTRING_ERROR = '22011' +DIVISION_BY_ZERO = '22012' +INVALID_ARGUMENT_FOR_NTILE_FUNCTION = '22014' +INTERVAL_FIELD_OVERFLOW = '22015' +INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = '22016' +INVALID_CHARACTER_VALUE_FOR_CAST = '22018' +INVALID_ESCAPE_CHARACTER = '22019' +INVALID_REGULAR_EXPRESSION = '2201B' +INVALID_ARGUMENT_FOR_LOGARITHM = '2201E' +INVALID_ARGUMENT_FOR_POWER_FUNCTION = '2201F' +INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = '2201G' +INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = '2201W' +INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = '2201X' +INVALID_LIMIT_VALUE = '22020' +CHARACTER_NOT_IN_REPERTOIRE = '22021' +INDICATOR_OVERFLOW = '22022' +INVALID_PARAMETER_VALUE = '22023' +UNTERMINATED_C_STRING = '22024' +INVALID_ESCAPE_SEQUENCE = '22025' +STRING_DATA_LENGTH_MISMATCH = '22026' +TRIM_ERROR = '22027' +ARRAY_SUBSCRIPT_ERROR = '2202E' +FLOATING_POINT_EXCEPTION = '22P01' +INVALID_TEXT_REPRESENTATION = '22P02' +INVALID_BINARY_REPRESENTATION = '22P03' +BAD_COPY_FILE_FORMAT = '22P04' +UNTRANSLATABLE_CHARACTER = '22P05' +NONSTANDARD_USE_OF_ESCAPE_CHARACTER = '22P06' + +# Class 23 - 
Integrity Constraint Violation +INTEGRITY_CONSTRAINT_VIOLATION = '23000' +RESTRICT_VIOLATION = '23001' +NOT_NULL_VIOLATION = '23502' +FOREIGN_KEY_VIOLATION = '23503' +UNIQUE_VIOLATION = '23505' +CHECK_VIOLATION = '23514' +EXCLUSION_VIOLATION = '23P01' + +# Class 24 - Invalid Cursor State +INVALID_CURSOR_STATE = '24000' + +# Class 25 - Invalid Transaction State +INVALID_TRANSACTION_STATE = '25000' +ACTIVE_SQL_TRANSACTION = '25001' +BRANCH_TRANSACTION_ALREADY_ACTIVE = '25002' +INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = '25003' +INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = '25004' +NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = '25005' +READ_ONLY_SQL_TRANSACTION = '25006' +SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = '25007' +HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008' +NO_ACTIVE_SQL_TRANSACTION = '25P01' +IN_FAILED_SQL_TRANSACTION = '25P02' + +# Class 26 - Invalid SQL Statement Name +INVALID_SQL_STATEMENT_NAME = '26000' + +# Class 27 - Triggered Data Change Violation +TRIGGERED_DATA_CHANGE_VIOLATION = '27000' + +# Class 28 - Invalid Authorization Specification +INVALID_AUTHORIZATION_SPECIFICATION = '28000' +INVALID_PASSWORD = '28P01' + +# Class 2B - Dependent Privilege Descriptors Still Exist +DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B000' +DEPENDENT_OBJECTS_STILL_EXIST = '2BP01' + +# Class 2D - Invalid Transaction Termination +INVALID_TRANSACTION_TERMINATION = '2D000' + +# Class 2F - SQL Routine Exception +SQL_ROUTINE_EXCEPTION = '2F000' +MODIFYING_SQL_DATA_NOT_PERMITTED = '2F002' +PROHIBITED_SQL_STATEMENT_ATTEMPTED = '2F003' +READING_SQL_DATA_NOT_PERMITTED = '2F004' +FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005' + +# Class 34 - Invalid Cursor Name +INVALID_CURSOR_NAME = '34000' + +# Class 38 - External Routine Exception +EXTERNAL_ROUTINE_EXCEPTION = '38000' +CONTAINING_SQL_NOT_PERMITTED = '38001' +MODIFYING_SQL_DATA_NOT_PERMITTED = '38002' +PROHIBITED_SQL_STATEMENT_ATTEMPTED = '38003' +READING_SQL_DATA_NOT_PERMITTED = 
'38004' + +# Class 39 - External Routine Invocation Exception +EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39000' +INVALID_SQLSTATE_RETURNED = '39001' +NULL_VALUE_NOT_ALLOWED = '39004' +TRIGGER_PROTOCOL_VIOLATED = '39P01' +SRF_PROTOCOL_VIOLATED = '39P02' + +# Class 3B - Savepoint Exception +SAVEPOINT_EXCEPTION = '3B000' +INVALID_SAVEPOINT_SPECIFICATION = '3B001' + +# Class 3D - Invalid Catalog Name +INVALID_CATALOG_NAME = '3D000' + +# Class 3F - Invalid Schema Name +INVALID_SCHEMA_NAME = '3F000' + +# Class 40 - Transaction Rollback +TRANSACTION_ROLLBACK = '40000' +SERIALIZATION_FAILURE = '40001' +TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = '40002' +STATEMENT_COMPLETION_UNKNOWN = '40003' +DEADLOCK_DETECTED = '40P01' + +# Class 42 - Syntax Error or Access Rule Violation +SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42000' +INSUFFICIENT_PRIVILEGE = '42501' +SYNTAX_ERROR = '42601' +INVALID_NAME = '42602' +INVALID_COLUMN_DEFINITION = '42611' +NAME_TOO_LONG = '42622' +DUPLICATE_COLUMN = '42701' +AMBIGUOUS_COLUMN = '42702' +UNDEFINED_COLUMN = '42703' +UNDEFINED_OBJECT = '42704' +DUPLICATE_OBJECT = '42710' +DUPLICATE_ALIAS = '42712' +DUPLICATE_FUNCTION = '42723' +AMBIGUOUS_FUNCTION = '42725' +GROUPING_ERROR = '42803' +DATATYPE_MISMATCH = '42804' +WRONG_OBJECT_TYPE = '42809' +INVALID_FOREIGN_KEY = '42830' +CANNOT_COERCE = '42846' +UNDEFINED_FUNCTION = '42883' +RESERVED_NAME = '42939' +UNDEFINED_TABLE = '42P01' +UNDEFINED_PARAMETER = '42P02' +DUPLICATE_CURSOR = '42P03' +DUPLICATE_DATABASE = '42P04' +DUPLICATE_PREPARED_STATEMENT = '42P05' +DUPLICATE_SCHEMA = '42P06' +DUPLICATE_TABLE = '42P07' +AMBIGUOUS_PARAMETER = '42P08' +AMBIGUOUS_ALIAS = '42P09' +INVALID_COLUMN_REFERENCE = '42P10' +INVALID_CURSOR_DEFINITION = '42P11' +INVALID_DATABASE_DEFINITION = '42P12' +INVALID_FUNCTION_DEFINITION = '42P13' +INVALID_PREPARED_STATEMENT_DEFINITION = '42P14' +INVALID_SCHEMA_DEFINITION = '42P15' +INVALID_TABLE_DEFINITION = '42P16' +INVALID_OBJECT_DEFINITION = '42P17' +INDETERMINATE_DATATYPE = 
'42P18' +INVALID_RECURSION = '42P19' +WINDOWING_ERROR = '42P20' +COLLATION_MISMATCH = '42P21' +INDETERMINATE_COLLATION = '42P22' + +# Class 44 - WITH CHECK OPTION Violation +WITH_CHECK_OPTION_VIOLATION = '44000' + +# Class 53 - Insufficient Resources +INSUFFICIENT_RESOURCES = '53000' +DISK_FULL = '53100' +OUT_OF_MEMORY = '53200' +TOO_MANY_CONNECTIONS = '53300' +CONFIGURATION_LIMIT_EXCEEDED = '53400' + +# Class 54 - Program Limit Exceeded +PROGRAM_LIMIT_EXCEEDED = '54000' +STATEMENT_TOO_COMPLEX = '54001' +TOO_MANY_COLUMNS = '54011' +TOO_MANY_ARGUMENTS = '54023' + +# Class 55 - Object Not In Prerequisite State +OBJECT_NOT_IN_PREREQUISITE_STATE = '55000' +OBJECT_IN_USE = '55006' +CANT_CHANGE_RUNTIME_PARAM = '55P02' +LOCK_NOT_AVAILABLE = '55P03' + +# Class 57 - Operator Intervention +OPERATOR_INTERVENTION = '57000' +QUERY_CANCELED = '57014' +ADMIN_SHUTDOWN = '57P01' +CRASH_SHUTDOWN = '57P02' +CANNOT_CONNECT_NOW = '57P03' +DATABASE_DROPPED = '57P04' + +# Class 58 - System Error (errors external to PostgreSQL itself) +SYSTEM_ERROR = '58000' +IO_ERROR = '58030' +UNDEFINED_FILE = '58P01' +DUPLICATE_FILE = '58P02' + +# Class F0 - Configuration File Error +CONFIG_FILE_ERROR = 'F0000' +LOCK_FILE_EXISTS = 'F0001' + +# Class HV - Foreign Data Wrapper Error (SQL/MED) +FDW_ERROR = 'HV000' +FDW_OUT_OF_MEMORY = 'HV001' +FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = 'HV002' +FDW_INVALID_DATA_TYPE = 'HV004' +FDW_COLUMN_NAME_NOT_FOUND = 'HV005' +FDW_INVALID_DATA_TYPE_DESCRIPTORS = 'HV006' +FDW_INVALID_COLUMN_NAME = 'HV007' +FDW_INVALID_COLUMN_NUMBER = 'HV008' +FDW_INVALID_USE_OF_NULL_POINTER = 'HV009' +FDW_INVALID_STRING_FORMAT = 'HV00A' +FDW_INVALID_HANDLE = 'HV00B' +FDW_INVALID_OPTION_INDEX = 'HV00C' +FDW_INVALID_OPTION_NAME = 'HV00D' +FDW_OPTION_NAME_NOT_FOUND = 'HV00J' +FDW_REPLY_HANDLE = 'HV00K' +FDW_UNABLE_TO_CREATE_EXECUTION = 'HV00L' +FDW_UNABLE_TO_CREATE_REPLY = 'HV00M' +FDW_UNABLE_TO_ESTABLISH_CONNECTION = 'HV00N' +FDW_NO_SCHEMAS = 'HV00P' +FDW_SCHEMA_NOT_FOUND = 'HV00Q' 
+FDW_TABLE_NOT_FOUND = 'HV00R' +FDW_FUNCTION_SEQUENCE_ERROR = 'HV010' +FDW_TOO_MANY_HANDLES = 'HV014' +FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = 'HV021' +FDW_INVALID_ATTRIBUTE_VALUE = 'HV024' +FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = 'HV090' +FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = 'HV091' + +# Class P0 - PL/pgSQL Error +PLPGSQL_ERROR = 'P0000' +RAISE_EXCEPTION = 'P0001' +NO_DATA_FOUND = 'P0002' +TOO_MANY_ROWS = 'P0003' + +# Class XX - Internal Error +INTERNAL_ERROR = 'XX000' +DATA_CORRUPTED = 'XX001' +INDEX_CORRUPTED = 'XX002' diff --git a/psycopg2/extensions.py b/psycopg2/extensions.py new file mode 100644 index 0000000..f210da4 --- /dev/null +++ b/psycopg2/extensions.py @@ -0,0 +1,177 @@ +"""psycopg extensions to the DBAPI-2.0 + +This module holds all the extensions to the DBAPI-2.0 provided by psycopg. + +- `connection` -- the new-type inheritable connection class +- `cursor` -- the new-type inheritable cursor class +- `lobject` -- the new-type inheritable large object class +- `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used + by psycopg to adapt Python types to PostgreSQL ones + +.. _PEP-246: http://www.python.org/peps/pep-0246.html +""" +# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. 
+# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +from psycopg2._psycopg import UNICODE, INTEGER, LONGINTEGER, BOOLEAN, FLOAT +from psycopg2._psycopg import TIME, DATE, INTERVAL, DECIMAL +from psycopg2._psycopg import BINARYARRAY, BOOLEANARRAY, DATEARRAY, DATETIMEARRAY +from psycopg2._psycopg import DECIMALARRAY, FLOATARRAY, INTEGERARRAY, INTERVALARRAY +from psycopg2._psycopg import LONGINTEGERARRAY, ROWIDARRAY, STRINGARRAY, TIMEARRAY +from psycopg2._psycopg import UNICODEARRAY + +from psycopg2._psycopg import Binary, Boolean, Int, Float, QuotedString, AsIs +try: + from psycopg2._psycopg import MXDATE, MXDATETIME, MXINTERVAL, MXTIME + from psycopg2._psycopg import MXDATEARRAY, MXDATETIMEARRAY, MXINTERVALARRAY, MXTIMEARRAY + from psycopg2._psycopg import DateFromMx, TimeFromMx, TimestampFromMx + from psycopg2._psycopg import IntervalFromMx +except ImportError: + pass + +try: + from psycopg2._psycopg import PYDATE, PYDATETIME, PYINTERVAL, PYTIME + from psycopg2._psycopg import PYDATEARRAY, PYDATETIMEARRAY, PYINTERVALARRAY, PYTIMEARRAY + from psycopg2._psycopg import DateFromPy, TimeFromPy, TimestampFromPy + from psycopg2._psycopg import IntervalFromPy +except ImportError: + pass + +from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid +from psycopg2._psycopg import string_types, binary_types, new_type, new_array_type, register_type +from psycopg2._psycopg import ISQLQuote, Notify, Diagnostics + +from psycopg2._psycopg import QueryCanceledError, TransactionRollbackError + +try: + from psycopg2._psycopg import set_wait_callback, get_wait_callback +except ImportError: + pass + +"""Isolation level values.""" +ISOLATION_LEVEL_AUTOCOMMIT = 0 +ISOLATION_LEVEL_READ_UNCOMMITTED = 4 +ISOLATION_LEVEL_READ_COMMITTED = 1 
+ISOLATION_LEVEL_REPEATABLE_READ = 2 +ISOLATION_LEVEL_SERIALIZABLE = 3 + +"""psycopg connection status values.""" +STATUS_SETUP = 0 +STATUS_READY = 1 +STATUS_BEGIN = 2 +STATUS_SYNC = 3 # currently unused +STATUS_ASYNC = 4 # currently unused +STATUS_PREPARED = 5 + +# This is a useful mnemonic to check if the connection is in a transaction +STATUS_IN_TRANSACTION = STATUS_BEGIN + +"""psycopg asynchronous connection polling values""" +POLL_OK = 0 +POLL_READ = 1 +POLL_WRITE = 2 +POLL_ERROR = 3 + +"""Backend transaction status values.""" +TRANSACTION_STATUS_IDLE = 0 +TRANSACTION_STATUS_ACTIVE = 1 +TRANSACTION_STATUS_INTRANS = 2 +TRANSACTION_STATUS_INERROR = 3 +TRANSACTION_STATUS_UNKNOWN = 4 + +import sys as _sys + +# Return bytes from a string +if _sys.version_info[0] < 3: + def b(s): + return s +else: + def b(s): + return s.encode('utf8') + +def register_adapter(typ, callable): + """Register 'callable' as an ISQLQuote adapter for type 'typ'.""" + adapters[(typ, ISQLQuote)] = callable + + +# The SQL_IN class is the official adapter for tuples starting from 2.0.6. +class SQL_IN(object): + """Adapt any iterable to an SQL quotable object.""" + def __init__(self, seq): + self._seq = seq + self._conn = None + + def prepare(self, conn): + self._conn = conn + + def getquoted(self): + # this is the important line: note how every object in the + # list is adapted and then how getquoted() is called on it + pobjs = [adapt(o) for o in self._seq] + if self._conn is not None: + for obj in pobjs: + if hasattr(obj, 'prepare'): + obj.prepare(self._conn) + qobjs = [o.getquoted() for o in pobjs] + return b('(') + b(', ').join(qobjs) + b(')') + + def __str__(self): + return str(self.getquoted()) + + +class NoneAdapter(object): + """Adapt None to NULL. + + This adapter is not used normally as a fast path in mogrify uses NULL, + but it makes easier to adapt composite types. 
+ """ + def __init__(self, obj): + pass + + def getquoted(self, _null=b("NULL")): + return _null + + +# Create default json typecasters for PostgreSQL 9.2 oids +from psycopg2._json import register_default_json + +try: + JSON, JSONARRAY = register_default_json() +except ImportError: + pass + +del register_default_json + + +# Create default Range typecasters +from psycopg2. _range import Range +del Range + +# Add the "cleaned" version of the encodings to the key. +# When the encoding is set its name is cleaned up from - and _ and turned +# uppercase, so an encoding not respecting these rules wouldn't be found in the +# encodings keys and would raise an exception with the unicode typecaster +for k, v in encodings.items(): + k = k.replace('_', '').replace('-', '').upper() + encodings[k] = v + +del k, v diff --git a/psycopg2/extras.py b/psycopg2/extras.py new file mode 100644 index 0000000..b21e223 --- /dev/null +++ b/psycopg2/extras.py @@ -0,0 +1,974 @@ +"""Miscellaneous goodies for psycopg2 + +This module is a generic place used to hold little helper functions +and classes until a better place in the distribution is found. +""" +# psycopg/extras.py - miscellaneous extra goodies for psycopg +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. 
+# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import os as _os +import sys as _sys +import time as _time +import re as _re + +try: + import logging as _logging +except: + _logging = None + +import psycopg2 +from psycopg2 import extensions as _ext +from psycopg2.extensions import cursor as _cursor +from psycopg2.extensions import connection as _connection +from psycopg2.extensions import adapt as _A +from psycopg2.extensions import b + + +class DictCursorBase(_cursor): + """Base class for all dict-like cursors.""" + + def __init__(self, *args, **kwargs): + if 'row_factory' in kwargs: + row_factory = kwargs['row_factory'] + del kwargs['row_factory'] + else: + raise NotImplementedError( + "DictCursorBase can't be instantiated without a row factory.") + super(DictCursorBase, self).__init__(*args, **kwargs) + self._query_executed = 0 + self._prefetch = 0 + self.row_factory = row_factory + + def fetchone(self): + if self._prefetch: + res = super(DictCursorBase, self).fetchone() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super(DictCursorBase, self).fetchone() + return res + + def fetchmany(self, size=None): + if self._prefetch: + res = super(DictCursorBase, self).fetchmany(size) + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super(DictCursorBase, self).fetchmany(size) + return res + + def fetchall(self): + if self._prefetch: + res = super(DictCursorBase, self).fetchall() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super(DictCursorBase, self).fetchall() + return res + + def __iter__(self): + if self._prefetch: + res = super(DictCursorBase, self).__iter__() + first = res.next() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = 
super(DictCursorBase, self).__iter__() + first = res.next() + + yield first + while 1: + yield res.next() + + +class DictConnection(_connection): + """A connection that uses `DictCursor` automatically.""" + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', DictCursor) + return super(DictConnection, self).cursor(*args, **kwargs) + +class DictCursor(DictCursorBase): + """A cursor that keeps a list of column name -> index mappings.""" + + def __init__(self, *args, **kwargs): + kwargs['row_factory'] = DictRow + super(DictCursor, self).__init__(*args, **kwargs) + self._prefetch = 1 + + def execute(self, query, vars=None): + self.index = {} + self._query_executed = 1 + return super(DictCursor, self).execute(query, vars) + + def callproc(self, procname, vars=None): + self.index = {} + self._query_executed = 1 + return super(DictCursor, self).callproc(procname, vars) + + def _build_index(self): + if self._query_executed == 1 and self.description: + for i in range(len(self.description)): + self.index[self.description[i][0]] = i + self._query_executed = 0 + +class DictRow(list): + """A row object that allow by-column-name access to data.""" + + __slots__ = ('_index',) + + def __init__(self, cursor): + self._index = cursor.index + self[:] = [None] * len(cursor.description) + + def __getitem__(self, x): + if not isinstance(x, (int, slice)): + x = self._index[x] + return list.__getitem__(self, x) + + def __setitem__(self, x, v): + if not isinstance(x, (int, slice)): + x = self._index[x] + list.__setitem__(self, x, v) + + def items(self): + return list(self.iteritems()) + + def keys(self): + return self._index.keys() + + def values(self): + return tuple(self[:]) + + def has_key(self, x): + return x in self._index + + def get(self, x, default=None): + try: + return self[x] + except: + return default + + def iteritems(self): + for n, v in self._index.iteritems(): + yield n, list.__getitem__(self, v) + + def iterkeys(self): + return self._index.iterkeys() + + 
def itervalues(self): + return list.__iter__(self) + + def copy(self): + return dict(self.iteritems()) + + def __contains__(self, x): + return x in self._index + + def __getstate__(self): + return self[:], self._index.copy() + + def __setstate__(self, data): + self[:] = data[0] + self._index = data[1] + + # drop the crusty Py2 methods + if _sys.version_info[0] > 2: + items = iteritems; del iteritems + keys = iterkeys; del iterkeys + values = itervalues; del itervalues + del has_key + + +class RealDictConnection(_connection): + """A connection that uses `RealDictCursor` automatically.""" + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', RealDictCursor) + return super(RealDictConnection, self).cursor(*args, **kwargs) + +class RealDictCursor(DictCursorBase): + """A cursor that uses a real dict as the base type for rows. + + Note that this cursor is extremely specialized and does not allow + the normal access (using integer indices) to fetched data. If you need + to access database rows both as a dictionary and a list, then use + the generic `DictCursor` instead of `!RealDictCursor`. 
+ """ + def __init__(self, *args, **kwargs): + kwargs['row_factory'] = RealDictRow + super(RealDictCursor, self).__init__(*args, **kwargs) + self._prefetch = 0 + + def execute(self, query, vars=None): + self.column_mapping = [] + self._query_executed = 1 + return super(RealDictCursor, self).execute(query, vars) + + def callproc(self, procname, vars=None): + self.column_mapping = [] + self._query_executed = 1 + return super(RealDictCursor, self).callproc(procname, vars) + + def _build_index(self): + if self._query_executed == 1 and self.description: + for i in range(len(self.description)): + self.column_mapping.append(self.description[i][0]) + self._query_executed = 0 + +class RealDictRow(dict): + """A `!dict` subclass representing a data record.""" + + __slots__ = ('_column_mapping') + + def __init__(self, cursor): + dict.__init__(self) + # Required for named cursors + if cursor.description and not cursor.column_mapping: + cursor._build_index() + + self._column_mapping = cursor.column_mapping + + def __setitem__(self, name, value): + if type(name) == int: + name = self._column_mapping[name] + return dict.__setitem__(self, name, value) + + def __getstate__(self): + return (self.copy(), self._column_mapping[:]) + + def __setstate__(self, data): + self.update(data[0]) + self._column_mapping = data[1] + + +class NamedTupleConnection(_connection): + """A connection that uses `NamedTupleCursor` automatically.""" + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', NamedTupleCursor) + return super(NamedTupleConnection, self).cursor(*args, **kwargs) + +class NamedTupleCursor(_cursor): + """A cursor that generates results as `~collections.namedtuple`. + + `!fetch*()` methods will return named tuples instead of regular tuples, so + their elements can be accessed both as regular numeric items as well as + attributes. 
+ + >>> nt_cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor) + >>> rec = nt_cur.fetchone() + >>> rec + Record(id=1, num=100, data="abc'def") + >>> rec[1] + 100 + >>> rec.data + "abc'def" + """ + Record = None + + def execute(self, query, vars=None): + self.Record = None + return super(NamedTupleCursor, self).execute(query, vars) + + def executemany(self, query, vars): + self.Record = None + return super(NamedTupleCursor, self).executemany(query, vars) + + def callproc(self, procname, vars=None): + self.Record = None + return super(NamedTupleCursor, self).callproc(procname, vars) + + def fetchone(self): + t = super(NamedTupleCursor, self).fetchone() + if t is not None: + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return nt._make(t) + + def fetchmany(self, size=None): + ts = super(NamedTupleCursor, self).fetchmany(size) + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return map(nt._make, ts) + + def fetchall(self): + ts = super(NamedTupleCursor, self).fetchall() + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + return map(nt._make, ts) + + def __iter__(self): + it = super(NamedTupleCursor, self).__iter__() + t = it.next() + + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() + + yield nt._make(t) + + while 1: + yield nt._make(it.next()) + + try: + from collections import namedtuple + except ImportError, _exc: + def _make_nt(self): + raise self._exc + else: + def _make_nt(self, namedtuple=namedtuple): + return namedtuple("Record", [d[0] for d in self.description or ()]) + + +class LoggingConnection(_connection): + """A connection that logs all queries to a file or logger__ object. + + .. __: http://docs.python.org/library/logging.html + """ + + def initialize(self, logobj): + """Initialize the connection to log to `!logobj`. + + The `!logobj` parameter can be an open file object or a Logger + instance from the standard logging module. 
+ """ + self._logobj = logobj + if _logging and isinstance(logobj, _logging.Logger): + self.log = self._logtologger + else: + self.log = self._logtofile + + def filter(self, msg, curs): + """Filter the query before logging it. + + This is the method to overwrite to filter unwanted queries out of the + log or to add some extra data to the output. The default implementation + just does nothing. + """ + return msg + + def _logtofile(self, msg, curs): + msg = self.filter(msg, curs) + if msg: self._logobj.write(msg + _os.linesep) + + def _logtologger(self, msg, curs): + msg = self.filter(msg, curs) + if msg: self._logobj.debug(msg) + + def _check(self): + if not hasattr(self, '_logobj'): + raise self.ProgrammingError( + "LoggingConnection object has not been initialize()d") + + def cursor(self, *args, **kwargs): + self._check() + kwargs.setdefault('cursor_factory', LoggingCursor) + return super(LoggingConnection, self).cursor(*args, **kwargs) + +class LoggingCursor(_cursor): + """A cursor that logs queries using its connection logging facilities.""" + + def execute(self, query, vars=None): + try: + return super(LoggingCursor, self).execute(query, vars) + finally: + self.connection.log(self.query, self) + + def callproc(self, procname, vars=None): + try: + return super(LoggingCursor, self).callproc(procname, vars) + finally: + self.connection.log(self.query, self) + + +class MinTimeLoggingConnection(LoggingConnection): + """A connection that logs queries based on execution time. + + This is just an example of how to sub-class `LoggingConnection` to + provide some extra filtering for the logged queries. Both the + `initialize()` and `filter()` methods are overwritten to make sure + that only queries executing for more than ``mintime`` ms are logged. + + Note that this connection uses the specialized cursor + `MinTimeLoggingCursor`. 
+ """ + def initialize(self, logobj, mintime=0): + LoggingConnection.initialize(self, logobj) + self._mintime = mintime + + def filter(self, msg, curs): + t = (_time.time() - curs.timestamp) * 1000 + if t > self._mintime: + return msg + _os.linesep + " (execution time: %d ms)" % t + + def cursor(self, *args, **kwargs): + kwargs.setdefault('cursor_factory', MinTimeLoggingCursor) + return LoggingConnection.cursor(self, *args, **kwargs) + +class MinTimeLoggingCursor(LoggingCursor): + """The cursor sub-class companion to `MinTimeLoggingConnection`.""" + + def execute(self, query, vars=None): + self.timestamp = _time.time() + return LoggingCursor.execute(self, query, vars) + + def callproc(self, procname, vars=None): + self.timestamp = _time.time() + return LoggingCursor.execute(self, procname, vars) + + +# a dbtype and adapter for Python UUID type + +class UUID_adapter(object): + """Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__. + + .. __: http://docs.python.org/library/uuid.html + .. __: http://www.postgresql.org/docs/current/static/datatype-uuid.html + """ + + def __init__(self, uuid): + self._uuid = uuid + + def __conform__(self, proto): + if proto is _ext.ISQLQuote: + return self + + def getquoted(self): + return b("'%s'::uuid" % self._uuid) + + def __str__(self): + return "'%s'::uuid" % self._uuid + +def register_uuid(oids=None, conn_or_curs=None): + """Create the UUID type and an uuid.UUID adapter. + + :param oids: oid for the PostgreSQL :sql:`uuid` type, or 2-items sequence + with oids of the type and the array. If not specified, use PostgreSQL + standard oids. + :param conn_or_curs: where to register the typecaster. If not specified, + register it globally. 
+ """ + + import uuid + + if not oids: + oid1 = 2950 + oid2 = 2951 + elif isinstance(oids, (list, tuple)): + oid1, oid2 = oids + else: + oid1 = oids + oid2 = 2951 + + _ext.UUID = _ext.new_type((oid1, ), "UUID", + lambda data, cursor: data and uuid.UUID(data) or None) + _ext.UUIDARRAY = _ext.new_array_type((oid2,), "UUID[]", _ext.UUID) + + _ext.register_type(_ext.UUID, conn_or_curs) + _ext.register_type(_ext.UUIDARRAY, conn_or_curs) + _ext.register_adapter(uuid.UUID, UUID_adapter) + + return _ext.UUID + + +# a type, dbtype and adapter for PostgreSQL inet type + +class Inet(object): + """Wrap a string to allow for correct SQL-quoting of inet values. + + Note that this adapter does NOT check the passed value to make + sure it really is an inet-compatible address but DOES call adapt() + on it to make sure it is impossible to execute an SQL-injection + by passing an evil value to the initializer. + """ + def __init__(self, addr): + self.addr = addr + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self.addr) + + def prepare(self, conn): + self._conn = conn + + def getquoted(self): + obj = _A(self.addr) + if hasattr(obj, 'prepare'): + obj.prepare(self._conn) + return obj.getquoted() + b("::inet") + + def __conform__(self, proto): + if proto is _ext.ISQLQuote: + return self + + def __str__(self): + return str(self.addr) + +def register_inet(oid=None, conn_or_curs=None): + """Create the INET type and an Inet adapter. + + :param oid: oid for the PostgreSQL :sql:`inet` type, or 2-items sequence + with oids of the type and the array. If not specified, use PostgreSQL + standard oids. + :param conn_or_curs: where to register the typecaster. If not specified, + register it globally. 
+ """ + if not oid: + oid1 = 869 + oid2 = 1041 + elif isinstance(oid, (list, tuple)): + oid1, oid2 = oid + else: + oid1 = oid + oid2 = 1041 + + _ext.INET = _ext.new_type((oid1, ), "INET", + lambda data, cursor: data and Inet(data) or None) + _ext.INETARRAY = _ext.new_array_type((oid2, ), "INETARRAY", _ext.INET) + + _ext.register_type(_ext.INET, conn_or_curs) + _ext.register_type(_ext.INETARRAY, conn_or_curs) + + return _ext.INET + + +def register_tstz_w_secs(oids=None, conn_or_curs=None): + """The function used to register an alternate type caster for + :sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with + seconds in the UTC offset. + + These are now correctly handled by the default type caster, so currently + the function doesn't do anything. + """ + import warnings + warnings.warn("deprecated", DeprecationWarning) + + +def wait_select(conn): + """Wait until a connection or cursor has data available. + + The function is an example of a wait callback to be registered with + `~psycopg2.extensions.set_wait_callback()`. This function uses + :py:func:`~select.select()` to wait for data available. 
+ + """ + import select + from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE + + while 1: + state = conn.poll() + if state == POLL_OK: + break + elif state == POLL_READ: + select.select([conn.fileno()], [], []) + elif state == POLL_WRITE: + select.select([], [conn.fileno()], []) + else: + raise conn.OperationalError("bad state from poll: %s" % state) + + +def _solve_conn_curs(conn_or_curs): + """Return the connection and a DBAPI cursor from a connection or cursor.""" + if conn_or_curs is None: + raise psycopg2.ProgrammingError("no connection or cursor provided") + + if hasattr(conn_or_curs, 'execute'): + conn = conn_or_curs.connection + curs = conn.cursor(cursor_factory=_cursor) + else: + conn = conn_or_curs + curs = conn.cursor(cursor_factory=_cursor) + + return conn, curs + + +class HstoreAdapter(object): + """Adapt a Python dict to the hstore syntax.""" + def __init__(self, wrapped): + self.wrapped = wrapped + + def prepare(self, conn): + self.conn = conn + + # use an old-style getquoted implementation if required + if conn.server_version < 90000: + self.getquoted = self._getquoted_8 + + def _getquoted_8(self): + """Use the operators available in PG pre-9.0.""" + if not self.wrapped: + return b("''::hstore") + + adapt = _ext.adapt + rv = [] + for k, v in self.wrapped.iteritems(): + k = adapt(k) + k.prepare(self.conn) + k = k.getquoted() + + if v is not None: + v = adapt(v) + v.prepare(self.conn) + v = v.getquoted() + else: + v = b('NULL') + + # XXX this b'ing is painfully inefficient! 
+ rv.append(b("(") + k + b(" => ") + v + b(")")) + + return b("(") + b('||').join(rv) + b(")") + + def _getquoted_9(self): + """Use the hstore(text[], text[]) function.""" + if not self.wrapped: + return b("''::hstore") + + k = _ext.adapt(self.wrapped.keys()) + k.prepare(self.conn) + v = _ext.adapt(self.wrapped.values()) + v.prepare(self.conn) + return b("hstore(") + k.getquoted() + b(", ") + v.getquoted() + b(")") + + getquoted = _getquoted_9 + + _re_hstore = _re.compile(r""" + # hstore key: + # a string of normal or escaped chars + "((?: [^"\\] | \\. )*)" + \s*=>\s* # hstore value + (?: + NULL # the value can be null - not catched + # or a quoted string like the key + | "((?: [^"\\] | \\. )*)" + ) + (?:\s*,\s*|$) # pairs separated by comma or end of string. + """, _re.VERBOSE) + + @classmethod + def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")): + """Parse an hstore representation in a Python string. + + The hstore is represented as something like:: + + "a"=>"1", "b"=>"2" + + with backslash-escaped strings. + """ + if s is None: + return None + + rv = {} + start = 0 + for m in self._re_hstore.finditer(s): + if m is None or m.start() != start: + raise psycopg2.InterfaceError( + "error parsing hstore pair at char %d" % start) + k = _bsdec.sub(r'\1', m.group(1)) + v = m.group(2) + if v is not None: + v = _bsdec.sub(r'\1', v) + + rv[k] = v + start = m.end() + + if start < len(s): + raise psycopg2.InterfaceError( + "error parsing hstore: unparsed data after char %d" % start) + + return rv + + @classmethod + def parse_unicode(self, s, cur): + """Parse an hstore returning unicode keys and values.""" + if s is None: + return None + + s = s.decode(_ext.encodings[cur.connection.encoding]) + return self.parse(s, cur) + + @classmethod + def get_oids(self, conn_or_curs): + """Return the lists of OID of the hstore and hstore[] types. 
+ """ + conn, curs = _solve_conn_curs(conn_or_curs) + + # Store the transaction status of the connection to revert it after use + conn_status = conn.status + + # column typarray not available before PG 8.3 + typarray = conn.server_version >= 80300 and "typarray" or "NULL" + + rv0, rv1 = [], [] + + # get the oid for the hstore + curs.execute("""\ +SELECT t.oid, %s +FROM pg_type t JOIN pg_namespace ns + ON typnamespace = ns.oid +WHERE typname = 'hstore'; +""" % typarray) + for oids in curs: + rv0.append(oids[0]) + rv1.append(oids[1]) + + # revert the status of the connection as before the command + if (conn_status != _ext.STATUS_IN_TRANSACTION + and not conn.autocommit): + conn.rollback() + + return tuple(rv0), tuple(rv1) + +def register_hstore(conn_or_curs, globally=False, unicode=False, + oid=None, array_oid=None): + """Register adapter and typecaster for `!dict`\-\ |hstore| conversions. + + :param conn_or_curs: a connection or cursor: the typecaster will be + registered only on this object unless *globally* is set to `!True` + :param globally: register the adapter globally, not only on *conn_or_curs* + :param unicode: if `!True`, keys and values returned from the database + will be `!unicode` instead of `!str`. The option is not available on + Python 3 + :param oid: the OID of the |hstore| type if known. If not, it will be + queried on *conn_or_curs*. + :param array_oid: the OID of the |hstore| array type if known. If not, it + will be queried on *conn_or_curs*. + + The connection or cursor passed to the function will be used to query the + database and look for the OID of the |hstore| type (which may be different + across databases). If querying is not desirable (e.g. with + :ref:`asynchronous connections `) you may specify it in the + *oid* parameter, which can be found using a query such as :sql:`SELECT + 'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid* + using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`. 
+ + Note that, when passing a dictionary from Python to the database, both + strings and unicode keys and values are supported. Dictionaries returned + from the database have keys/values according to the *unicode* parameter. + + The |hstore| contrib module must be already installed in the database + (executing the ``hstore.sql`` script in your ``contrib`` directory). + Raise `~psycopg2.ProgrammingError` if the type is not found. + """ + if oid is None: + oid = HstoreAdapter.get_oids(conn_or_curs) + if oid is None or not oid[0]: + raise psycopg2.ProgrammingError( + "hstore type not found in the database. " + "please install it from your 'contrib/hstore.sql' file") + else: + array_oid = oid[1] + oid = oid[0] + + if isinstance(oid, int): + oid = (oid,) + + if array_oid is not None: + if isinstance(array_oid, int): + array_oid = (array_oid,) + else: + array_oid = tuple([x for x in array_oid if x]) + + # create and register the typecaster + if _sys.version_info[0] < 3 and unicode: + cast = HstoreAdapter.parse_unicode + else: + cast = HstoreAdapter.parse + + HSTORE = _ext.new_type(oid, "HSTORE", cast) + _ext.register_type(HSTORE, not globally and conn_or_curs or None) + _ext.register_adapter(dict, HstoreAdapter) + + if array_oid: + HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE) + _ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None) + + +class CompositeCaster(object): + """Helps conversion of a PostgreSQL composite type into a Python object. + + The class is usually created by the `register_composite()` function. + You may want to create and register manually instances of the class if + querying the database at registration time is not desirable (such as when + using an :ref:`asynchronous connections `). 
+ + """ + def __init__(self, name, oid, attrs, array_oid=None, schema=None): + self.name = name + self.schema = schema + self.oid = oid + self.array_oid = array_oid + + self.attnames = [ a[0] for a in attrs ] + self.atttypes = [ a[1] for a in attrs ] + self._create_type(name, self.attnames) + self.typecaster = _ext.new_type((oid,), name, self.parse) + if array_oid: + self.array_typecaster = _ext.new_array_type( + (array_oid,), "%sARRAY" % name, self.typecaster) + else: + self.array_typecaster = None + + def parse(self, s, curs): + if s is None: + return None + + tokens = self.tokenize(s) + if len(tokens) != len(self.atttypes): + raise psycopg2.DataError( + "expecting %d components for the type %s, %d found instead" % + (len(self.atttypes), self.name, len(tokens))) + + values = [ curs.cast(oid, token) + for oid, token in zip(self.atttypes, tokens) ] + + return self.make(values) + + def make(self, values): + """Return a new Python object representing the data being casted. + + *values* is the list of attributes, already casted into their Python + representation. + + You can subclass this method to :ref:`customize the composite cast + `. + """ + + return self._ctor(values) + + _re_tokenize = _re.compile(r""" + \(? ([,)]) # an empty token, representing NULL +| \(? " ((?: [^"] | "")*) " [,)] # or a quoted string +| \(? 
([^",)]+) [,)] # or an unquoted string + """, _re.VERBOSE) + + _re_undouble = _re.compile(r'(["\\])\1') + + @classmethod + def tokenize(self, s): + rv = [] + for m in self._re_tokenize.finditer(s): + if m is None: + raise psycopg2.InterfaceError("can't parse type: %r" % s) + if m.group(1) is not None: + rv.append(None) + elif m.group(2) is not None: + rv.append(self._re_undouble.sub(r"\1", m.group(2))) + else: + rv.append(m.group(3)) + + return rv + + def _create_type(self, name, attnames): + try: + from collections import namedtuple + except ImportError: + self.type = tuple + self._ctor = self.type + else: + self.type = namedtuple(name, attnames) + self._ctor = self.type._make + + @classmethod + def _from_db(self, name, conn_or_curs): + """Return a `CompositeCaster` instance for the type *name*. + + Raise `ProgrammingError` if the type is not found. + """ + conn, curs = _solve_conn_curs(conn_or_curs) + + # Store the transaction status of the connection to revert it after use + conn_status = conn.status + + # Use the correct schema + if '.' 
in name: + schema, tname = name.split('.', 1) + else: + tname = name + schema = 'public' + + # column typarray not available before PG 8.3 + typarray = conn.server_version >= 80300 and "typarray" or "NULL" + + # get the type oid and attributes + curs.execute("""\ +SELECT t.oid, %s, attname, atttypid +FROM pg_type t +JOIN pg_namespace ns ON typnamespace = ns.oid +JOIN pg_attribute a ON attrelid = typrelid +WHERE typname = %%s AND nspname = %%s + AND attnum > 0 AND NOT attisdropped +ORDER BY attnum; +""" % typarray, (tname, schema)) + + recs = curs.fetchall() + + # revert the status of the connection as before the command + if (conn_status != _ext.STATUS_IN_TRANSACTION + and not conn.autocommit): + conn.rollback() + + if not recs: + raise psycopg2.ProgrammingError( + "PostgreSQL type '%s' not found" % name) + + type_oid = recs[0][0] + array_oid = recs[0][1] + type_attrs = [ (r[2], r[3]) for r in recs ] + + return self(tname, type_oid, type_attrs, + array_oid=array_oid, schema=schema) + +def register_composite(name, conn_or_curs, globally=False, factory=None): + """Register a typecaster to convert a composite type into a tuple. + + :param name: the name of a PostgreSQL composite type, e.g. 
created using + the |CREATE TYPE|_ command + :param conn_or_curs: a connection or cursor used to find the type oid and + components; the typecaster is registered in a scope limited to this + object, unless *globally* is set to `!True` + :param globally: if `!False` (default) register the typecaster only on + *conn_or_curs*, otherwise register it globally + :param factory: if specified it should be a `CompositeCaster` subclass: use + it to :ref:`customize how to cast composite types ` + :return: the registered `CompositeCaster` or *factory* instance + responsible for the conversion + """ + if factory is None: + factory = CompositeCaster + + caster = factory._from_db(name, conn_or_curs) + _ext.register_type(caster.typecaster, not globally and conn_or_curs or None) + + if caster.array_typecaster is not None: + _ext.register_type(caster.array_typecaster, not globally and conn_or_curs or None) + + return caster + + +# expose the json adaptation stuff into the module +from psycopg2._json import json, Json, register_json, register_default_json + + +# Expose range-related objects +from psycopg2._range import Range, NumericRange +from psycopg2._range import DateRange, DateTimeRange, DateTimeTZRange +from psycopg2._range import register_range, RangeAdapter, RangeCaster diff --git a/psycopg2/pool.py b/psycopg2/pool.py new file mode 100644 index 0000000..3b41c80 --- /dev/null +++ b/psycopg2/pool.py @@ -0,0 +1,235 @@ +"""Connection pooling for psycopg2 + +This module implements thread-safe (and not) connection pools. +""" +# psycopg/pool.py - pooling code for psycopg +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import psycopg2 +import psycopg2.extensions as _ext + + +class PoolError(psycopg2.Error): + pass + + +class AbstractConnectionPool(object): + """Generic key-based pooling code.""" + + def __init__(self, minconn, maxconn, *args, **kwargs): + """Initialize the connection pool. + + New 'minconn' connections are created immediately calling 'connfunc' + with given parameters. The connection pool will support a maximum of + about 'maxconn' connections. 
+ """ + self.minconn = minconn + self.maxconn = maxconn + self.closed = False + + self._args = args + self._kwargs = kwargs + + self._pool = [] + self._used = {} + self._rused = {} # id(conn) -> key map + self._keys = 0 + + for i in range(self.minconn): + self._connect() + + def _connect(self, key=None): + """Create a new connection and assign it to 'key' if not None.""" + conn = psycopg2.connect(*self._args, **self._kwargs) + if key is not None: + self._used[key] = conn + self._rused[id(conn)] = key + else: + self._pool.append(conn) + return conn + + def _getkey(self): + """Return a new unique key.""" + self._keys += 1 + return self._keys + + def _getconn(self, key=None): + """Get a free connection and assign it to 'key' if not None.""" + if self.closed: raise PoolError("connection pool is closed") + if key is None: key = self._getkey() + + if key in self._used: + return self._used[key] + + if self._pool: + self._used[key] = conn = self._pool.pop() + self._rused[id(conn)] = key + return conn + else: + if len(self._used) == self.maxconn: + raise PoolError("connection pool exausted") + return self._connect(key) + + def _putconn(self, conn, key=None, close=False): + """Put away a connection.""" + if self.closed: raise PoolError("connection pool is closed") + if key is None: key = self._rused.get(id(conn)) + + if not key: + raise PoolError("trying to put unkeyed connection") + + if len(self._pool) < self.minconn and not close: + # Return the connection into a consistent state before putting + # it back into the pool + if not conn.closed: + status = conn.get_transaction_status() + if status == _ext.TRANSACTION_STATUS_UNKNOWN: + # server connection lost + conn.close() + elif status != _ext.TRANSACTION_STATUS_IDLE: + # connection in error or in transaction + conn.rollback() + self._pool.append(conn) + else: + # regular idle connection + self._pool.append(conn) + # If the connection is closed, we just discard it. 
+ else: + conn.close() + + # here we check for the presence of key because it can happen that a + # thread tries to put back a connection after a call to close + if not self.closed or key in self._used: + del self._used[key] + del self._rused[id(conn)] + + def _closeall(self): + """Close all connections. + + Note that this can lead to some code fail badly when trying to use + an already closed connection. If you call .closeall() make sure + your code can deal with it. + """ + if self.closed: raise PoolError("connection pool is closed") + for conn in self._pool + list(self._used.values()): + try: + conn.close() + except: + pass + self.closed = True + + +class SimpleConnectionPool(AbstractConnectionPool): + """A connection pool that can't be shared across different threads.""" + + getconn = AbstractConnectionPool._getconn + putconn = AbstractConnectionPool._putconn + closeall = AbstractConnectionPool._closeall + + +class ThreadedConnectionPool(AbstractConnectionPool): + """A connection pool that works with the threading module.""" + + def __init__(self, minconn, maxconn, *args, **kwargs): + """Initialize the threading lock.""" + import threading + AbstractConnectionPool.__init__( + self, minconn, maxconn, *args, **kwargs) + self._lock = threading.Lock() + + def getconn(self, key=None): + """Get a free connection and assign it to 'key' if not None.""" + self._lock.acquire() + try: + return self._getconn(key) + finally: + self._lock.release() + + def putconn(self, conn=None, key=None, close=False): + """Put away an unused connection.""" + self._lock.acquire() + try: + self._putconn(conn, key, close) + finally: + self._lock.release() + + def closeall(self): + """Close all connections (even the one currently in use.)""" + self._lock.acquire() + try: + self._closeall() + finally: + self._lock.release() + + +class PersistentConnectionPool(AbstractConnectionPool): + """A pool that assigns persistent connections to different threads. 
+ + Note that this connection pool generates by itself the required keys + using the current thread id. This means that until a thread puts away + a connection it will always get the same connection object by successive + `!getconn()` calls. This also means that a thread can't use more than one + single connection from the pool. + """ + + def __init__(self, minconn, maxconn, *args, **kwargs): + """Initialize the threading lock.""" + import warnings + warnings.warn("deprecated: use ZPsycopgDA.pool implementation", + DeprecationWarning) + + import threading + AbstractConnectionPool.__init__( + self, minconn, maxconn, *args, **kwargs) + self._lock = threading.Lock() + + # we we'll need the thread module, to determine thread ids, so we + # import it here and copy it in an instance variable + import thread + self.__thread = thread + + def getconn(self): + """Generate thread id and return a connection.""" + key = self.__thread.get_ident() + self._lock.acquire() + try: + return self._getconn(key) + finally: + self._lock.release() + + def putconn(self, conn=None, close=False): + """Put away an unused connection.""" + key = self.__thread.get_ident() + self._lock.acquire() + try: + if not conn: conn = self._used[key] + self._putconn(conn, key, close) + finally: + self._lock.release() + + def closeall(self): + """Close all connections (even the one currently in use.)""" + self._lock.acquire() + try: + self._closeall() + finally: + self._lock.release() diff --git a/psycopg2/psycopg1.py b/psycopg2/psycopg1.py new file mode 100644 index 0000000..7a24c5f --- /dev/null +++ b/psycopg2/psycopg1.py @@ -0,0 +1,95 @@ +"""psycopg 1.1.x compatibility module + +This module uses the new style connection and cursor types to build a psycopg +1.1.1.x compatibility layer. It should be considered a temporary hack to run +old code while porting to psycopg 2. 
Import it as follows:: + + from psycopg2 import psycopg1 as psycopg +""" +# psycopg/psycopg1.py - psycopg 1.1.x compatibility module +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import _psycopg as _2psycopg +from psycopg2.extensions import cursor as _2cursor +from psycopg2.extensions import connection as _2connection + +from psycopg2 import * +import psycopg2.extensions as _ext +_2connect = connect + +def connect(*args, **kwargs): + """connect(dsn, ...) 
-> new psycopg 1.1.x compatible connection object""" + kwargs['connection_factory'] = connection + conn = _2connect(*args, **kwargs) + conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED) + return conn + +class connection(_2connection): + """psycopg 1.1.x connection.""" + + def cursor(self): + """cursor() -> new psycopg 1.1.x compatible cursor object""" + return _2connection.cursor(self, cursor_factory=cursor) + + def autocommit(self, on_off=1): + """autocommit(on_off=1) -> switch autocommit on (1) or off (0)""" + if on_off > 0: + self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT) + else: + self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED) + + +class cursor(_2cursor): + """psycopg 1.1.x cursor. + + Note that this cursor implements the exact procedure used by psycopg 1 to + build dictionaries out of result rows. The DictCursor in the + psycopg.extras modules implements a much better and faster algorithm. + """ + + def __build_dict(self, row): + res = {} + for i in range(len(self.description)): + res[self.description[i][0]] = row[i] + return res + + def dictfetchone(self): + row = _2cursor.fetchone(self) + if row: + return self.__build_dict(row) + else: + return row + + def dictfetchmany(self, size): + res = [] + rows = _2cursor.fetchmany(self, size) + for row in rows: + res.append(self.__build_dict(row)) + return res + + def dictfetchall(self): + res = [] + rows = _2cursor.fetchall(self) + for row in rows: + res.append(self.__build_dict(row)) + return res + diff --git a/psycopg2/tests/__init__.py b/psycopg2/tests/__init__.py new file mode 100644 index 0000000..3e677d8 --- /dev/null +++ b/psycopg2/tests/__init__.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +# psycopg2 test suite +# +# Copyright (C) 2007-2011 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the 
License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import sys +from testconfig import dsn +from testutils import unittest + +import test_async +import test_bugX000 +import test_bug_gc +import test_cancel +import test_connection +import test_copy +import test_cursor +import test_dates +import test_extras_dictcursor +import test_green +import test_lobject +import test_module +import test_notify +import test_psycopg2_dbapi20 +import test_quote +import test_transaction +import test_types_basic +import test_types_extras + +if sys.version_info[:2] >= (2, 5): + import test_with +else: + test_with = None + +def test_suite(): + # If connection to test db fails, bail out early. + import psycopg2 + try: + cnn = psycopg2.connect(dsn) + except Exception, e: + print "Failed connection to test db:", e.__class__.__name__, e + print "Please set env vars 'PSYCOPG2_TESTDB*' to valid values." 
+ sys.exit(1) + else: + cnn.close() + + suite = unittest.TestSuite() + suite.addTest(test_async.test_suite()) + suite.addTest(test_bugX000.test_suite()) + suite.addTest(test_bug_gc.test_suite()) + suite.addTest(test_cancel.test_suite()) + suite.addTest(test_connection.test_suite()) + suite.addTest(test_copy.test_suite()) + suite.addTest(test_cursor.test_suite()) + suite.addTest(test_dates.test_suite()) + suite.addTest(test_extras_dictcursor.test_suite()) + suite.addTest(test_green.test_suite()) + suite.addTest(test_lobject.test_suite()) + suite.addTest(test_module.test_suite()) + suite.addTest(test_notify.test_suite()) + suite.addTest(test_psycopg2_dbapi20.test_suite()) + suite.addTest(test_quote.test_suite()) + suite.addTest(test_transaction.test_suite()) + suite.addTest(test_types_basic.test_suite()) + suite.addTest(test_types_extras.test_suite()) + if test_with: + suite.addTest(test_with.test_suite()) + return suite + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/psycopg2/tests/dbapi20.py b/psycopg2/tests/dbapi20.py new file mode 100644 index 0000000..b8d6a39 --- /dev/null +++ b/psycopg2/tests/dbapi20.py @@ -0,0 +1,872 @@ +#!/usr/bin/env python +''' Python DB API 2.0 driver compliance unit test suite. + + This software is Public Domain and may be used without restrictions. + + "Now we have booze and barflies entering the discussion, plus rumours of + DBAs on drugs... and I won't tell you what flashes through my mind each + time I read the subject line with 'Anal Compliance' in it. All around + this is turning out to be a thoroughly unwholesome unit test." + + -- Ian Bicking +''' + +__rcs_id__ = '$Id: dbapi20.py,v 1.11 2005/01/02 02:41:01 zenzen Exp $' +__version__ = '$Revision: 1.12 $'[11:-2] +__author__ = 'Stuart Bishop ' + +import unittest +import time +import sys + + +# Revision 1.12 2009/02/06 03:35:11 kf7xm +# Tested okay with Python 3.0, includes last minute patches from Mark H. 
+# +# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole +# Include latest changes from main branch +# Updates for py3k +# +# Revision 1.11 2005/01/02 02:41:01 zenzen +# Update author email address +# +# Revision 1.10 2003/10/09 03:14:14 zenzen +# Add test for DB API 2.0 optional extension, where database exceptions +# are exposed as attributes on the Connection object. +# +# Revision 1.9 2003/08/13 01:16:36 zenzen +# Minor tweak from Stefan Fleiter +# +# Revision 1.8 2003/04/10 00:13:25 zenzen +# Changes, as per suggestions by M.-A. Lemburg +# - Add a table prefix, to ensure namespace collisions can always be avoided +# +# Revision 1.7 2003/02/26 23:33:37 zenzen +# Break out DDL into helper functions, as per request by David Rushby +# +# Revision 1.6 2003/02/21 03:04:33 zenzen +# Stuff from Henrik Ekelund: +# added test_None +# added test_nextset & hooks +# +# Revision 1.5 2003/02/17 22:08:43 zenzen +# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize +# defaults to 1 & generic cursor.callproc test added +# +# Revision 1.4 2003/02/15 00:16:33 zenzen +# Changes, as per suggestions and bug reports by M.-A. Lemburg, +# Matthew T. 
Kromer, Federico Di Gregorio and Daniel Dittmar +# - Class renamed +# - Now a subclass of TestCase, to avoid requiring the driver stub +# to use multiple inheritance +# - Reversed the polarity of buggy test in test_description +# - Test exception hierarchy correctly +# - self.populate is now self._populate(), so if a driver stub +# overrides self.ddl1 this change propogates +# - VARCHAR columns now have a width, which will hopefully make the +# DDL even more portible (this will be reversed if it causes more problems) +# - cursor.rowcount being checked after various execute and fetchXXX methods +# - Check for fetchall and fetchmany returning empty lists after results +# are exhausted (already checking for empty lists if select retrieved +# nothing +# - Fix bugs in test_setoutputsize_basic and test_setinputsizes +# +def str2bytes(sval): + if sys.version_info < (3,0) and isinstance(sval, str): + sval = sval.decode("latin1") + return sval.encode("latin1") + +class DatabaseAPI20Test(unittest.TestCase): + ''' Test a database self.driver for DB API 2.0 compatibility. + This implementation tests Gadfly, but the TestCase + is structured so that other self.drivers can subclass this + test case to ensure compiliance with the DB-API. It is + expected that this TestCase may be expanded in the future + if ambiguities or edge conditions are discovered. + + The 'Optional Extensions' are not yet being tested. + + self.drivers should subclass this test, overriding setUp, tearDown, + self.driver, connect_args and connect_kw_args. Class specification + should be as follows: + + import dbapi20 + class mytest(dbapi20.DatabaseAPI20Test): + [...] + + Don't 'import DatabaseAPI20Test from dbapi20', or you will + confuse the unit tester - just 'import dbapi20'. + ''' + + # The self.driver module. 
This should be the module where the 'connect' + # method is to be found + driver = None + connect_args = () # List of arguments to pass to connect + connect_kw_args = {} # Keyword arguments for connect + table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables + + ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix + ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix + xddl1 = 'drop table %sbooze' % table_prefix + xddl2 = 'drop table %sbarflys' % table_prefix + + lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase + + # Some drivers may need to override these helpers, for example adding + # a 'commit' after the execute. + def executeDDL1(self,cursor): + cursor.execute(self.ddl1) + + def executeDDL2(self,cursor): + cursor.execute(self.ddl2) + + def setUp(self): + ''' self.drivers should override this method to perform required setup + if any is necessary, such as creating the database. + ''' + pass + + def tearDown(self): + ''' self.drivers should override this method to perform required cleanup + if any is necessary, such as deleting the test database. + The default drops the tables that may be created. + ''' + con = self._connect() + try: + cur = con.cursor() + for ddl in (self.xddl1,self.xddl2): + try: + cur.execute(ddl) + con.commit() + except self.driver.Error: + # Assume table didn't exist. Other tests will check if + # execute is busted. 
+ pass + finally: + con.close() + + def _connect(self): + try: + return self.driver.connect( + *self.connect_args,**self.connect_kw_args + ) + except AttributeError: + self.fail("No connect method found in self.driver module") + + def test_connect(self): + con = self._connect() + con.close() + + def test_apilevel(self): + try: + # Must exist + apilevel = self.driver.apilevel + # Must equal 2.0 + self.assertEqual(apilevel,'2.0') + except AttributeError: + self.fail("Driver doesn't define apilevel") + + def test_threadsafety(self): + try: + # Must exist + threadsafety = self.driver.threadsafety + # Must be a valid value + self.failUnless(threadsafety in (0,1,2,3)) + except AttributeError: + self.fail("Driver doesn't define threadsafety") + + def test_paramstyle(self): + try: + # Must exist + paramstyle = self.driver.paramstyle + # Must be a valid value + self.failUnless(paramstyle in ( + 'qmark','numeric','named','format','pyformat' + )) + except AttributeError: + self.fail("Driver doesn't define paramstyle") + + def test_Exceptions(self): + # Make sure required exceptions exist, and are in the + # defined hierarchy. 
+ if sys.version[0] == '3': #under Python 3 StardardError no longer exists + self.failUnless(issubclass(self.driver.Warning,Exception)) + self.failUnless(issubclass(self.driver.Error,Exception)) + else: + self.failUnless(issubclass(self.driver.Warning,StandardError)) + self.failUnless(issubclass(self.driver.Error,StandardError)) + + self.failUnless( + issubclass(self.driver.InterfaceError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.DatabaseError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.OperationalError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.IntegrityError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.InternalError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.ProgrammingError,self.driver.Error) + ) + self.failUnless( + issubclass(self.driver.NotSupportedError,self.driver.Error) + ) + + def test_ExceptionsAsConnectionAttributes(self): + # OPTIONAL EXTENSION + # Test for the optional DB API 2.0 extension, where the exceptions + # are exposed as attributes on the Connection object + # I figure this optional extension will be implemented by any + # driver author who is using this test suite, so it is enabled + # by default. 
+ con = self._connect() + drv = self.driver + self.failUnless(con.Warning is drv.Warning) + self.failUnless(con.Error is drv.Error) + self.failUnless(con.InterfaceError is drv.InterfaceError) + self.failUnless(con.DatabaseError is drv.DatabaseError) + self.failUnless(con.OperationalError is drv.OperationalError) + self.failUnless(con.IntegrityError is drv.IntegrityError) + self.failUnless(con.InternalError is drv.InternalError) + self.failUnless(con.ProgrammingError is drv.ProgrammingError) + self.failUnless(con.NotSupportedError is drv.NotSupportedError) + + + def test_commit(self): + con = self._connect() + try: + # Commit must work, even if it doesn't do anything + con.commit() + finally: + con.close() + + def test_rollback(self): + con = self._connect() + # If rollback is defined, it should either work or throw + # the documented exception + if hasattr(con,'rollback'): + try: + con.rollback() + except self.driver.NotSupportedError: + pass + + def test_cursor(self): + con = self._connect() + try: + cur = con.cursor() + finally: + con.close() + + def test_cursor_isolation(self): + con = self._connect() + try: + # Make sure cursors created from the same connection have + # the documented transaction isolation level + cur1 = con.cursor() + cur2 = con.cursor() + self.executeDDL1(cur1) + cur1.execute("insert into %sbooze values ('Victoria Bitter')" % ( + self.table_prefix + )) + cur2.execute("select name from %sbooze" % self.table_prefix) + booze = cur2.fetchall() + self.assertEqual(len(booze),1) + self.assertEqual(len(booze[0]),1) + self.assertEqual(booze[0][0],'Victoria Bitter') + finally: + con.close() + + def test_description(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + self.assertEqual(cur.description,None, + 'cursor.description should be none after executing a ' + 'statement that can return no rows (such as DDL)' + ) + cur.execute('select name from %sbooze' % self.table_prefix) + self.assertEqual(len(cur.description),1, + 
'cursor.description describes too many columns' + ) + self.assertEqual(len(cur.description[0]),7, + 'cursor.description[x] tuples must have 7 elements' + ) + self.assertEqual(cur.description[0][0].lower(),'name', + 'cursor.description[x][0] must return column name' + ) + self.assertEqual(cur.description[0][1],self.driver.STRING, + 'cursor.description[x][1] must return column type. Got %r' + % cur.description[0][1] + ) + + # Make sure self.description gets reset + self.executeDDL2(cur) + self.assertEqual(cur.description,None, + 'cursor.description not being set to None when executing ' + 'no-result statements (eg. DDL)' + ) + finally: + con.close() + + def test_rowcount(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + self.assertEqual(cur.rowcount,-1, + 'cursor.rowcount should be -1 after executing no-result ' + 'statements' + ) + cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( + self.table_prefix + )) + self.failUnless(cur.rowcount in (-1,1), + 'cursor.rowcount should == number or rows inserted, or ' + 'set to -1 after executing an insert statement' + ) + cur.execute("select name from %sbooze" % self.table_prefix) + self.failUnless(cur.rowcount in (-1,1), + 'cursor.rowcount should == number of rows returned, or ' + 'set to -1 after executing a select statement' + ) + self.executeDDL2(cur) + self.assertEqual(cur.rowcount,-1, + 'cursor.rowcount not being reset to -1 after executing ' + 'no-result statements' + ) + finally: + con.close() + + lower_func = 'lower' + def test_callproc(self): + con = self._connect() + try: + cur = con.cursor() + if self.lower_func and hasattr(cur,'callproc'): + r = cur.callproc(self.lower_func,('FOO',)) + self.assertEqual(len(r),1) + self.assertEqual(r[0],'FOO') + r = cur.fetchall() + self.assertEqual(len(r),1,'callproc produced no result set') + self.assertEqual(len(r[0]),1, + 'callproc produced invalid result set' + ) + self.assertEqual(r[0][0],'foo', + 'callproc produced invalid 
+        # connection.commit should raise an Error if called after connection
+        # closed.
self.assertEqual(beers[0],"Cooper's", + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly' + ) + self.assertEqual(beers[1],"Victoria Bitter", + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly' + ) + + def test_executemany(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + largs = [ ("Cooper's",) , ("Boag's",) ] + margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ] + if self.driver.paramstyle == 'qmark': + cur.executemany( + 'insert into %sbooze values (?)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'numeric': + cur.executemany( + 'insert into %sbooze values (:1)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'named': + cur.executemany( + 'insert into %sbooze values (:beer)' % self.table_prefix, + margs + ) + elif self.driver.paramstyle == 'format': + cur.executemany( + 'insert into %sbooze values (%%s)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'pyformat': + cur.executemany( + 'insert into %sbooze values (%%(beer)s)' % ( + self.table_prefix + ), + margs + ) + else: + self.fail('Unknown paramstyle') + self.failUnless(cur.rowcount in (-1,2), + 'insert using cursor.executemany set cursor.rowcount to ' + 'incorrect value %r' % cur.rowcount + ) + cur.execute('select name from %sbooze' % self.table_prefix) + res = cur.fetchall() + self.assertEqual(len(res),2, + 'cursor.fetchall retrieved incorrect number of rows' + ) + beers = [res[0][0],res[1][0]] + beers.sort() + self.assertEqual(beers[0],"Boag's",'incorrect data retrieved') + self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved') + finally: + con.close() + + def test_fetchone(self): + con = self._connect() + try: + cur = con.cursor() + + # cursor.fetchone should raise an Error if called before + # executing a select-type query + self.assertRaises(self.driver.Error,cur.fetchone) + + # cursor.fetchone should raise an Error if called after + # executing a 
+            # cursor.fetchmany should raise an Error if called without
+            # issuing a query
+ ) + cur.arraysize=10 + r = cur.fetchmany(3) # Should get 3 rows + self.assertEqual(len(r),3, + 'cursor.fetchmany retrieved incorrect number of rows' + ) + r = cur.fetchmany(4) # Should get 2 more + self.assertEqual(len(r),2, + 'cursor.fetchmany retrieved incorrect number of rows' + ) + r = cur.fetchmany(4) # Should be an empty sequence + self.assertEqual(len(r),0, + 'cursor.fetchmany should return an empty sequence after ' + 'results are exhausted' + ) + self.failUnless(cur.rowcount in (-1,6)) + + # Same as above, using cursor.arraysize + cur.arraysize=4 + cur.execute('select name from %sbooze' % self.table_prefix) + r = cur.fetchmany() # Should get 4 rows + self.assertEqual(len(r),4, + 'cursor.arraysize not being honoured by fetchmany' + ) + r = cur.fetchmany() # Should get 2 more + self.assertEqual(len(r),2) + r = cur.fetchmany() # Should be an empty sequence + self.assertEqual(len(r),0) + self.failUnless(cur.rowcount in (-1,6)) + + cur.arraysize=6 + cur.execute('select name from %sbooze' % self.table_prefix) + rows = cur.fetchmany() # Should get all rows + self.failUnless(cur.rowcount in (-1,6)) + self.assertEqual(len(rows),6) + self.assertEqual(len(rows),6) + rows = [r[0] for r in rows] + rows.sort() + + # Make sure we get the right data back out + for i in range(0,6): + self.assertEqual(rows[i],self.samples[i], + 'incorrect data retrieved by cursor.fetchmany' + ) + + rows = cur.fetchmany() # Should return an empty list + self.assertEqual(len(rows),0, + 'cursor.fetchmany should return an empty sequence if ' + 'called after the whole result set has been fetched' + ) + self.failUnless(cur.rowcount in (-1,6)) + + self.executeDDL2(cur) + cur.execute('select name from %sbarflys' % self.table_prefix) + r = cur.fetchmany() # Should get empty sequence + self.assertEqual(len(r),0, + 'cursor.fetchmany should return an empty sequence if ' + 'query retrieved no rows' + ) + self.failUnless(cur.rowcount in (-1,0)) + + finally: + con.close() + + def test_fetchall(self): + 
+            # cursor.fetchall should raise an Error if called
+            # after executing a statement that cannot return rows
rows.extend([rows23[0][0],rows23[1][0]]) + rows.append(rows4[0]) + rows.extend([rows56[0][0],rows56[1][0]]) + rows.sort() + for i in range(0,len(self.samples)): + self.assertEqual(rows[i],self.samples[i], + 'incorrect data retrieved or inserted' + ) + finally: + con.close() + + def help_nextset_setUp(self,cur): + ''' Should create a procedure called deleteme + that returns two result sets, first the + number of rows in booze then "name from booze" + ''' + raise NotImplementedError('Helper not implemented') + #sql=""" + # create procedure deleteme as + # begin + # select count(*) from booze + # select name from booze + # end + #""" + #cur.execute(sql) + + def help_nextset_tearDown(self,cur): + 'If cleaning up is needed after nextSetTest' + raise NotImplementedError('Helper not implemented') + #cur.execute("drop procedure deleteme") + + def test_nextset(self): + con = self._connect() + try: + cur = con.cursor() + if not hasattr(cur,'nextset'): + return + + try: + self.executeDDL1(cur) + sql=self._populate() + for sql in self._populate(): + cur.execute(sql) + + self.help_nextset_setUp(cur) + + cur.callproc('deleteme') + numberofrows=cur.fetchone() + assert numberofrows[0]== len(self.samples) + assert cur.nextset() + names=cur.fetchall() + assert len(names) == len(self.samples) + s=cur.nextset() + assert s == None,'No more return sets, should return None' + finally: + self.help_nextset_tearDown(cur) + + finally: + con.close() + + def test_nextset(self): + raise NotImplementedError('Drivers need to override this test') + + def test_arraysize(self): + # Not much here - rest of the tests for this are in test_fetchmany + con = self._connect() + try: + cur = con.cursor() + self.failUnless(hasattr(cur,'arraysize'), + 'cursor.arraysize must be defined' + ) + finally: + con.close() + + def test_setinputsizes(self): + con = self._connect() + try: + cur = con.cursor() + cur.setinputsizes( (25,) ) + self._paraminsert(cur) # Make sure cursor still works + finally: + con.close() + 
+        # Real test for setoutputsize is driver dependent
+ ) + + def test_NUMBER(self): + self.failUnless(hasattr(self.driver,'NUMBER'), + 'module.NUMBER must be defined.' + ) + + def test_DATETIME(self): + self.failUnless(hasattr(self.driver,'DATETIME'), + 'module.DATETIME must be defined.' + ) + + def test_ROWID(self): + self.failUnless(hasattr(self.driver,'ROWID'), + 'module.ROWID must be defined.' + ) + diff --git a/psycopg2/tests/dbapi20_tpc.py b/psycopg2/tests/dbapi20_tpc.py new file mode 100644 index 0000000..d4790f7 --- /dev/null +++ b/psycopg2/tests/dbapi20_tpc.py @@ -0,0 +1,144 @@ +""" Python DB API 2.0 driver Two Phase Commit compliance test suite. + +""" + +import unittest + + +class TwoPhaseCommitTests(unittest.TestCase): + + driver = None + + def connect(self): + """Make a database connection.""" + raise NotImplementedError + + _last_id = 0 + _global_id_prefix = "dbapi20_tpc:" + + def make_xid(self, con): + id = TwoPhaseCommitTests._last_id + TwoPhaseCommitTests._last_id += 1 + return con.xid(42, "%s%d" % (self._global_id_prefix, id), "qualifier") + + def test_xid(self): + con = self.connect() + try: + xid = con.xid(42, "global", "bqual") + except self.driver.NotSupportedError: + self.fail("Driver does not support transaction IDs.") + + self.assertEquals(xid[0], 42) + self.assertEquals(xid[1], "global") + self.assertEquals(xid[2], "bqual") + + # Try some extremes for the transaction ID: + xid = con.xid(0, "", "") + self.assertEquals(tuple(xid), (0, "", "")) + xid = con.xid(0x7fffffff, "a" * 64, "b" * 64) + self.assertEquals(tuple(xid), (0x7fffffff, "a" * 64, "b" * 64)) + + def test_tpc_begin(self): + con = self.connect() + try: + xid = self.make_xid(con) + try: + con.tpc_begin(xid) + except self.driver.NotSupportedError: + self.fail("Driver does not support tpc_begin()") + finally: + con.close() + + def test_tpc_commit_without_prepare(self): + con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + cursor = con.cursor() + cursor.execute("SELECT 1") + con.tpc_commit() + finally: + 
con.close() + + def test_tpc_rollback_without_prepare(self): + con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + cursor = con.cursor() + cursor.execute("SELECT 1") + con.tpc_rollback() + finally: + con.close() + + def test_tpc_commit_with_prepare(self): + con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + cursor = con.cursor() + cursor.execute("SELECT 1") + con.tpc_prepare() + con.tpc_commit() + finally: + con.close() + + def test_tpc_rollback_with_prepare(self): + con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + cursor = con.cursor() + cursor.execute("SELECT 1") + con.tpc_prepare() + con.tpc_rollback() + finally: + con.close() + + def test_tpc_begin_in_transaction_fails(self): + con = self.connect() + try: + xid = self.make_xid(con) + + cursor = con.cursor() + cursor.execute("SELECT 1") + self.assertRaises(self.driver.ProgrammingError, + con.tpc_begin, xid) + finally: + con.close() + + def test_tpc_begin_in_tpc_transaction_fails(self): + con = self.connect() + try: + xid = self.make_xid(con) + + cursor = con.cursor() + cursor.execute("SELECT 1") + self.assertRaises(self.driver.ProgrammingError, + con.tpc_begin, xid) + finally: + con.close() + + def test_commit_in_tpc_fails(self): + # calling commit() within a TPC transaction fails with + # ProgrammingError. + con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + + self.assertRaises(self.driver.ProgrammingError, con.commit) + finally: + con.close() + + def test_rollback_in_tpc_fails(self): + # calling rollback() within a TPC transaction fails with + # ProgrammingError. 
+ con = self.connect() + try: + xid = self.make_xid(con) + con.tpc_begin(xid) + + self.assertRaises(self.driver.ProgrammingError, con.rollback) + finally: + con.close() diff --git a/psycopg2/tests/test_async.py b/psycopg2/tests/test_async.py new file mode 100644 index 0000000..981b49f --- /dev/null +++ b/psycopg2/tests/test_async.py @@ -0,0 +1,468 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# test_async.py - unit test for asynchronous API +# +# Copyright (C) 2010-2011 Jan Urbański +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +from testutils import unittest, skip_before_postgres + +import psycopg2 +from psycopg2 import extensions + +import time +import select +import StringIO + +from testutils import ConnectingTestCase + +class PollableStub(object): + """A 'pollable' wrapper allowing analysis of the `poll()` calls.""" + def __init__(self, pollable): + self.pollable = pollable + self.polls = [] + + def fileno(self): + return self.pollable.fileno() + + def poll(self): + rv = self.pollable.poll() + self.polls.append(rv) + return rv + + +class AsyncTests(ConnectingTestCase): + + def setUp(self): + ConnectingTestCase.setUp(self) + + self.sync_conn = self.conn + self.conn = self.connect(async=True) + + self.wait(self.conn) + + curs = self.conn.cursor() + curs.execute(''' + CREATE TEMPORARY TABLE table1 ( + id int PRIMARY KEY + )''') + self.wait(curs) + + def wait(self, cur_or_conn): + pollable = cur_or_conn + if not hasattr(pollable, 'poll'): + pollable = cur_or_conn.connection + while True: + state = pollable.poll() + if state == psycopg2.extensions.POLL_OK: + break + elif state == psycopg2.extensions.POLL_READ: + select.select([pollable], [], []) + elif state == psycopg2.extensions.POLL_WRITE: + select.select([], [pollable], []) + else: + raise Exception("Unexpected result from poll: %r", state) + + def test_connection_setup(self): + cur = self.conn.cursor() + sync_cur = self.sync_conn.cursor() + + self.assert_(self.conn.async) + self.assert_(not self.sync_conn.async) + + # the async connection should be in isolevel 0 + self.assertEquals(self.conn.isolation_level, 0) + + # check other properties to be found on the connection + self.assert_(self.conn.server_version) + self.assert_(self.conn.protocol_version in (2,3)) + self.assert_(self.conn.encoding in psycopg2.extensions.encodings) + + def test_async_named_cursor(self): + self.assertRaises(psycopg2.ProgrammingError, + self.conn.cursor, "name") + + def test_async_select(self): + cur = self.conn.cursor() + 
self.assertFalse(self.conn.isexecuting()) + cur.execute("select 'a'") + self.assertTrue(self.conn.isexecuting()) + + self.wait(cur) + + self.assertFalse(self.conn.isexecuting()) + self.assertEquals(cur.fetchone()[0], "a") + + @skip_before_postgres(8, 2) + def test_async_callproc(self): + cur = self.conn.cursor() + cur.callproc("pg_sleep", (0.1, )) + self.assertTrue(self.conn.isexecuting()) + + self.wait(cur) + self.assertFalse(self.conn.isexecuting()) + self.assertEquals(cur.fetchall()[0][0], '') + + def test_async_after_async(self): + cur = self.conn.cursor() + cur2 = self.conn.cursor() + + cur.execute("insert into table1 values (1)") + + # an async execute after an async one raises an exception + self.assertRaises(psycopg2.ProgrammingError, + cur.execute, "select * from table1") + # same for callproc + self.assertRaises(psycopg2.ProgrammingError, + cur.callproc, "version") + # but after you've waited it should be good + self.wait(cur) + cur.execute("select * from table1") + self.wait(cur) + + self.assertEquals(cur.fetchall()[0][0], 1) + + cur.execute("delete from table1") + self.wait(cur) + + cur.execute("select * from table1") + self.wait(cur) + + self.assertEquals(cur.fetchone(), None) + + def test_fetch_after_async(self): + cur = self.conn.cursor() + cur.execute("select 'a'") + + # a fetch after an asynchronous query should raise an error + self.assertRaises(psycopg2.ProgrammingError, + cur.fetchall) + # but after waiting it should work + self.wait(cur) + self.assertEquals(cur.fetchall()[0][0], "a") + + def test_rollback_while_async(self): + cur = self.conn.cursor() + + cur.execute("select 'a'") + + # a rollback should not work in asynchronous mode + self.assertRaises(psycopg2.ProgrammingError, self.conn.rollback) + + def test_commit_while_async(self): + cur = self.conn.cursor() + + cur.execute("begin") + self.wait(cur) + + cur.execute("insert into table1 values (1)") + + # a commit should not work in asynchronous mode + 
self.assertRaises(psycopg2.ProgrammingError, self.conn.commit) + self.assertTrue(self.conn.isexecuting()) + + # but a manual commit should + self.wait(cur) + cur.execute("commit") + self.wait(cur) + + cur.execute("select * from table1") + self.wait(cur) + self.assertEquals(cur.fetchall()[0][0], 1) + + cur.execute("delete from table1") + self.wait(cur) + + cur.execute("select * from table1") + self.wait(cur) + self.assertEquals(cur.fetchone(), None) + + def test_set_parameters_while_async(self): + cur = self.conn.cursor() + + cur.execute("select 'c'") + self.assertTrue(self.conn.isexecuting()) + + # getting transaction status works + self.assertEquals(self.conn.get_transaction_status(), + extensions.TRANSACTION_STATUS_ACTIVE) + self.assertTrue(self.conn.isexecuting()) + + # setting connection encoding should fail + self.assertRaises(psycopg2.ProgrammingError, + self.conn.set_client_encoding, "LATIN1") + + # same for transaction isolation + self.assertRaises(psycopg2.ProgrammingError, + self.conn.set_isolation_level, 1) + + def test_reset_while_async(self): + cur = self.conn.cursor() + cur.execute("select 'c'") + self.assertTrue(self.conn.isexecuting()) + + # a reset should fail + self.assertRaises(psycopg2.ProgrammingError, self.conn.reset) + + def test_async_iter(self): + cur = self.conn.cursor() + + cur.execute("begin") + self.wait(cur) + cur.execute(""" + insert into table1 values (1); + insert into table1 values (2); + insert into table1 values (3); + """) + self.wait(cur) + cur.execute("select id from table1 order by id") + + # iteration fails if a query is underway + self.assertRaises(psycopg2.ProgrammingError, list, cur) + + # but after it's done it should work + self.wait(cur) + self.assertEquals(list(cur), [(1, ), (2, ), (3, )]) + self.assertFalse(self.conn.isexecuting()) + + def test_copy_while_async(self): + cur = self.conn.cursor() + cur.execute("select 'a'") + + # copy should fail + self.assertRaises(psycopg2.ProgrammingError, + cur.copy_from, + 
StringIO.StringIO("1\n3\n5\n\\.\n"), "table1") + + def test_lobject_while_async(self): + # large objects should be prohibited + self.assertRaises(psycopg2.ProgrammingError, + self.conn.lobject) + + def test_async_executemany(self): + cur = self.conn.cursor() + self.assertRaises( + psycopg2.ProgrammingError, + cur.executemany, "insert into table1 values (%s)", [1, 2, 3]) + + def test_async_scroll(self): + cur = self.conn.cursor() + cur.execute(""" + insert into table1 values (1); + insert into table1 values (2); + insert into table1 values (3); + """) + self.wait(cur) + cur.execute("select id from table1 order by id") + + # scroll should fail if a query is underway + self.assertRaises(psycopg2.ProgrammingError, cur.scroll, 1) + self.assertTrue(self.conn.isexecuting()) + + # but after it's done it should work + self.wait(cur) + cur.scroll(1) + self.assertEquals(cur.fetchall(), [(2, ), (3, )]) + + cur = self.conn.cursor() + cur.execute("select id from table1 order by id") + self.wait(cur) + + cur2 = self.conn.cursor() + self.assertRaises(psycopg2.ProgrammingError, cur2.scroll, 1) + + self.assertRaises(psycopg2.ProgrammingError, cur.scroll, 4) + + cur = self.conn.cursor() + cur.execute("select id from table1 order by id") + self.wait(cur) + cur.scroll(2) + cur.scroll(-1) + self.assertEquals(cur.fetchall(), [(2, ), (3, )]) + + def test_scroll(self): + cur = self.sync_conn.cursor() + cur.execute("create table table1 (id int)") + cur.execute(""" + insert into table1 values (1); + insert into table1 values (2); + insert into table1 values (3); + """) + cur.execute("select id from table1 order by id") + cur.scroll(2) + cur.scroll(-1) + self.assertEquals(cur.fetchall(), [(2, ), (3, )]) + + def test_async_dont_read_all(self): + cur = self.conn.cursor() + cur.execute("select repeat('a', 10000); select repeat('b', 10000)") + + # fetch the result + self.wait(cur) + + # it should be the result of the second query + self.assertEquals(cur.fetchone()[0], "b" * 10000) + + def 
test_async_subclass(self): + class MyConn(psycopg2.extensions.connection): + def __init__(self, dsn, async=0): + psycopg2.extensions.connection.__init__(self, dsn, async=async) + + conn = self.connect(connection_factory=MyConn, async=True) + self.assert_(isinstance(conn, MyConn)) + self.assert_(conn.async) + conn.close() + + def test_flush_on_write(self): + # a very large query requires a flush loop to be sent to the backend + curs = self.conn.cursor() + for mb in 1, 5, 10, 20, 50: + size = mb * 1024 * 1024 + stub = PollableStub(self.conn) + curs.execute("select %s;", ('x' * size,)) + self.wait(stub) + self.assertEqual(size, len(curs.fetchone()[0])) + if stub.polls.count(psycopg2.extensions.POLL_WRITE) > 1: + return + + # This is more a testing glitch than an error: it happens + # on high load on linux: probably because the kernel has more + # buffers ready. A warning may be useful during development, + # but an error is bad during regression testing. + import warnings + warnings.warn("sending a large query didn't trigger block on write.") + + def test_sync_poll(self): + cur = self.sync_conn.cursor() + cur.execute("select 1") + # polling with a sync query works + cur.connection.poll() + self.assertEquals(cur.fetchone()[0], 1) + + def test_notify(self): + cur = self.conn.cursor() + sync_cur = self.sync_conn.cursor() + + sync_cur.execute("listen test_notify") + self.sync_conn.commit() + cur.execute("notify test_notify") + self.wait(cur) + + self.assertEquals(self.sync_conn.notifies, []) + + pid = self.conn.get_backend_pid() + for _ in range(5): + self.wait(self.sync_conn) + if not self.sync_conn.notifies: + time.sleep(0.5) + continue + self.assertEquals(len(self.sync_conn.notifies), 1) + self.assertEquals(self.sync_conn.notifies.pop(), + (pid, "test_notify")) + return + self.fail("No NOTIFY in 2.5 seconds") + + def test_async_fetch_wrong_cursor(self): + cur1 = self.conn.cursor() + cur2 = self.conn.cursor() + cur1.execute("select 1") + + self.wait(cur1) + 
self.assertFalse(self.conn.isexecuting()) + # fetching from a cursor with no results is an error + self.assertRaises(psycopg2.ProgrammingError, cur2.fetchone) + # fetching from the correct cursor works + self.assertEquals(cur1.fetchone()[0], 1) + + def test_error(self): + cur = self.conn.cursor() + cur.execute("insert into table1 values (%s)", (1, )) + self.wait(cur) + cur.execute("insert into table1 values (%s)", (1, )) + # this should fail + self.assertRaises(psycopg2.IntegrityError, self.wait, cur) + cur.execute("insert into table1 values (%s); " + "insert into table1 values (%s)", (2, 2)) + # this should fail as well + self.assertRaises(psycopg2.IntegrityError, self.wait, cur) + # but this should work + cur.execute("insert into table1 values (%s)", (2, )) + self.wait(cur) + # and the cursor should be usable afterwards + cur.execute("insert into table1 values (%s)", (3, )) + self.wait(cur) + cur.execute("select * from table1 order by id") + self.wait(cur) + self.assertEquals(cur.fetchall(), [(1, ), (2, ), (3, )]) + cur.execute("delete from table1") + self.wait(cur) + + def test_error_two_cursors(self): + cur = self.conn.cursor() + cur2 = self.conn.cursor() + cur.execute("select * from no_such_table") + self.assertRaises(psycopg2.ProgrammingError, self.wait, cur) + cur2.execute("select 1") + self.wait(cur2) + self.assertEquals(cur2.fetchone()[0], 1) + + def test_notices(self): + del self.conn.notices[:] + cur = self.conn.cursor() + if self.conn.server_version >= 90300: + cur.execute("set client_min_messages=debug1") + self.wait(cur) + cur.execute("create temp table chatty (id serial primary key);") + self.wait(cur) + self.assertEqual("CREATE TABLE", cur.statusmessage) + self.assert_(self.conn.notices) + + def test_async_cursor_gone(self): + import gc + cur = self.conn.cursor() + cur.execute("select 42;"); + del cur + gc.collect() + self.assertRaises(psycopg2.InterfaceError, self.wait, self.conn) + + # The connection is still usable + cur = self.conn.cursor() + 
cur.execute("select 42;"); + self.wait(self.conn) + self.assertEqual(cur.fetchone(), (42,)) + + def test_async_connection_error_message(self): + try: + cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True) + self.wait(cnn) + except psycopg2.Error, e: + self.assertNotEqual(str(e), "asynchronous connection failed", + "connection error reason lost") + else: + self.fail("no exception raised") + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() + diff --git a/psycopg2/tests/test_bugX000.py b/psycopg2/tests/test_bugX000.py new file mode 100644 index 0000000..efa593e --- /dev/null +++ b/psycopg2/tests/test_bugX000.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +# bugX000.py - test for DateTime object allocation bug +# +# Copyright (C) 2007-2011 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import psycopg2 +import time +import unittest + +class DateTimeAllocationBugTestCase(unittest.TestCase): + def test_date_time_allocation_bug(self): + d1 = psycopg2.Date(2002,12,25) + d2 = psycopg2.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0))) + t1 = psycopg2.Time(13,45,30) + t2 = psycopg2.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0))) + t1 = psycopg2.Timestamp(2002,12,25,13,45,30) + t2 = psycopg2.TimestampFromTicks( + time.mktime((2002,12,25,13,45,30,0,0,0))) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_bug_gc.py b/psycopg2/tests/test_bug_gc.py new file mode 100644 index 0000000..1551dc4 --- /dev/null +++ b/psycopg2/tests/test_bug_gc.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# bug_gc.py - test for refcounting/GC bug +# +# Copyright (C) 2010-2011 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import psycopg2 +import psycopg2.extensions +import unittest +import gc + +from testutils import ConnectingTestCase, skip_if_no_uuid + +class StolenReferenceTestCase(ConnectingTestCase): + @skip_if_no_uuid + def test_stolen_reference_bug(self): + def fish(val, cur): + gc.collect() + return 42 + UUID = psycopg2.extensions.new_type((2950,), "UUID", fish) + psycopg2.extensions.register_type(UUID, self.conn) + curs = self.conn.cursor() + curs.execute("select 'b5219e01-19ab-4994-b71e-149225dc51e4'::uuid") + curs.fetchone() + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_cancel.py b/psycopg2/tests/test_cancel.py new file mode 100644 index 0000000..0ffa742 --- /dev/null +++ b/psycopg2/tests/test_cancel.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# test_cancel.py - unit test for query cancellation +# +# Copyright (C) 2010-2011 Jan Urbański +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import threading + +import psycopg2 +import psycopg2.extensions +from psycopg2 import extras + +from testconfig import dsn +from testutils import unittest, ConnectingTestCase, skip_before_postgres + +class CancelTests(ConnectingTestCase): + + def setUp(self): + ConnectingTestCase.setUp(self) + + cur = self.conn.cursor() + cur.execute(''' + CREATE TEMPORARY TABLE table1 ( + id int PRIMARY KEY + )''') + self.conn.commit() + + def test_empty_cancel(self): + self.conn.cancel() + + @skip_before_postgres(8, 2) + def test_cancel(self): + errors = [] + + def neverending(conn): + cur = conn.cursor() + try: + self.assertRaises(psycopg2.extensions.QueryCanceledError, + cur.execute, "select pg_sleep(60)") + # make sure the connection still works + conn.rollback() + cur.execute("select 1") + self.assertEqual(cur.fetchall(), [(1, )]) + except Exception, e: + errors.append(e) + raise + + def canceller(conn): + cur = conn.cursor() + try: + conn.cancel() + except Exception, e: + errors.append(e) + raise + + thread1 = threading.Thread(target=neverending, args=(self.conn, )) + # wait a bit to make sure that the other thread is already in + # pg_sleep -- ugly and racy, but the chances are ridiculously low + thread2 = threading.Timer(0.3, canceller, args=(self.conn, )) + thread1.start() + thread2.start() + thread1.join() + thread2.join() + + self.assertEqual(errors, []) + + @skip_before_postgres(8, 2) + def test_async_cancel(self): + async_conn = psycopg2.connect(dsn, async=True) + self.assertRaises(psycopg2.OperationalError, async_conn.cancel) + extras.wait_select(async_conn) + cur = async_conn.cursor() + cur.execute("select pg_sleep(10000)") + self.assertTrue(async_conn.isexecuting()) + async_conn.cancel() + self.assertRaises(psycopg2.extensions.QueryCanceledError, + extras.wait_select, async_conn) + cur.execute("select 1") + extras.wait_select(async_conn) + self.assertEqual(cur.fetchall(), [(1, )]) + + def test_async_connection_cancel(self): + async_conn = psycopg2.connect(dsn, 
async=True) + async_conn.close() + self.assertTrue(async_conn.closed) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_connection.py b/psycopg2/tests/test_connection.py new file mode 100644 index 0000000..26ad932 --- /dev/null +++ b/psycopg2/tests/test_connection.py @@ -0,0 +1,1053 @@ +#!/usr/bin/env python + +# test_connection.py - unit test for connection attributes +# +# Copyright (C) 2008-2011 James Henstridge +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import os +import time +import threading +from operator import attrgetter + +import psycopg2 +import psycopg2.errorcodes +import psycopg2.extensions + +from testutils import unittest, decorate_all_tests, skip_if_no_superuser +from testutils import skip_before_postgres, skip_after_postgres +from testutils import ConnectingTestCase, skip_if_tpc_disabled +from testconfig import dsn, dbname + + +class ConnectionTests(ConnectingTestCase): + def test_closed_attribute(self): + conn = self.conn + self.assertEqual(conn.closed, False) + conn.close() + self.assertEqual(conn.closed, True) + + def test_close_idempotent(self): + conn = self.conn + conn.close() + conn.close() + self.assert_(conn.closed) + + def test_cursor_closed_attribute(self): + conn = self.conn + curs = conn.cursor() + self.assertEqual(curs.closed, False) + curs.close() + self.assertEqual(curs.closed, True) + + # Closing the connection closes the cursor: + curs = conn.cursor() + conn.close() + self.assertEqual(curs.closed, True) + + @skip_before_postgres(8, 4) + @skip_if_no_superuser + def test_cleanup_on_badconn_close(self): + # ticket #148 + conn = self.conn + cur = conn.cursor() + try: + cur.execute("select pg_terminate_backend(pg_backend_pid())") + except psycopg2.OperationalError, e: + if e.pgcode != psycopg2.errorcodes.ADMIN_SHUTDOWN: + raise + except psycopg2.DatabaseError, e: + # curiously when disconnected in green mode we get a DatabaseError + # without pgcode. 
+ if e.pgcode is not None: + raise + + self.assertEqual(conn.closed, 2) + conn.close() + self.assertEqual(conn.closed, 1) + + def test_reset(self): + conn = self.conn + # switch isolation level, then reset + level = conn.isolation_level + conn.set_isolation_level(0) + self.assertEqual(conn.isolation_level, 0) + conn.reset() + # now the isolation level should be equal to saved one + self.assertEqual(conn.isolation_level, level) + + def test_notices(self): + conn = self.conn + cur = conn.cursor() + if self.conn.server_version >= 90300: + cur.execute("set client_min_messages=debug1") + cur.execute("create temp table chatty (id serial primary key);") + self.assertEqual("CREATE TABLE", cur.statusmessage) + self.assert_(conn.notices) + + def test_notices_consistent_order(self): + conn = self.conn + cur = conn.cursor() + if self.conn.server_version >= 90300: + cur.execute("set client_min_messages=debug1") + cur.execute("create temp table table1 (id serial); create temp table table2 (id serial);") + cur.execute("create temp table table3 (id serial); create temp table table4 (id serial);") + self.assertEqual(4, len(conn.notices)) + self.assert_('table1' in conn.notices[0]) + self.assert_('table2' in conn.notices[1]) + self.assert_('table3' in conn.notices[2]) + self.assert_('table4' in conn.notices[3]) + + def test_notices_limited(self): + conn = self.conn + cur = conn.cursor() + if self.conn.server_version >= 90300: + cur.execute("set client_min_messages=debug1") + for i in range(0, 100, 10): + sql = " ".join(["create temp table table%d (id serial);" % j for j in range(i, i+10)]) + cur.execute(sql) + + self.assertEqual(50, len(conn.notices)) + self.assert_('table50' in conn.notices[0], conn.notices[0]) + self.assert_('table51' in conn.notices[1], conn.notices[1]) + self.assert_('table98' in conn.notices[-2], conn.notices[-2]) + self.assert_('table99' in conn.notices[-1], conn.notices[-1]) + + def test_server_version(self): + self.assert_(self.conn.server_version) + + def 
test_protocol_version(self): + self.assert_(self.conn.protocol_version in (2,3), + self.conn.protocol_version) + + def test_tpc_unsupported(self): + cnn = self.conn + if cnn.server_version >= 80100: + return self.skipTest("tpc is supported") + + self.assertRaises(psycopg2.NotSupportedError, + cnn.xid, 42, "foo", "bar") + + @skip_before_postgres(8, 2) + def test_concurrent_execution(self): + def slave(): + cnn = self.connect() + cur = cnn.cursor() + cur.execute("select pg_sleep(4)") + cur.close() + cnn.close() + + t1 = threading.Thread(target=slave) + t2 = threading.Thread(target=slave) + t0 = time.time() + t1.start() + t2.start() + t1.join() + t2.join() + self.assert_(time.time() - t0 < 7, + "something broken in concurrency") + + def test_encoding_name(self): + self.conn.set_client_encoding("EUC_JP") + # conn.encoding is 'EUCJP' now. + cur = self.conn.cursor() + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, cur) + cur.execute("select 'foo'::text;") + self.assertEqual(cur.fetchone()[0], u'foo') + + def test_connect_nonnormal_envvar(self): + # We must perform encoding normalization at connection time + self.conn.close() + oldenc = os.environ.get('PGCLIENTENCODING') + os.environ['PGCLIENTENCODING'] = 'utf-8' # malformed spelling + try: + self.conn = self.connect() + finally: + if oldenc is not None: + os.environ['PGCLIENTENCODING'] = oldenc + else: + del os.environ['PGCLIENTENCODING'] + + def test_weakref(self): + from weakref import ref + import gc + conn = psycopg2.connect(dsn) + w = ref(conn) + conn.close() + del conn + gc.collect() + self.assert_(w() is None) + + def test_commit_concurrency(self): + # The problem is the one reported in ticket #103. Because of bad + # status check, we commit even when a commit is already on its way. + # We can detect this condition by the warnings. 
+ conn = self.conn + notices = [] + stop = [] + + def committer(): + while not stop: + conn.commit() + while conn.notices: + notices.append((2, conn.notices.pop())) + + cur = conn.cursor() + t1 = threading.Thread(target=committer) + t1.start() + i = 1 + for i in range(1000): + cur.execute("select %s;",(i,)) + conn.commit() + while conn.notices: + notices.append((1, conn.notices.pop())) + + # Stop the committer thread + stop.append(True) + + self.assert_(not notices, "%d notices raised" % len(notices)) + + def test_connect_cursor_factory(self): + import psycopg2.extras + conn = self.connect(cursor_factory=psycopg2.extras.DictCursor) + cur = conn.cursor() + cur.execute("select 1 as a") + self.assertEqual(cur.fetchone()['a'], 1) + + def test_cursor_factory(self): + self.assertEqual(self.conn.cursor_factory, None) + cur = self.conn.cursor() + cur.execute("select 1 as a") + self.assertRaises(TypeError, (lambda r: r['a']), cur.fetchone()) + + self.conn.cursor_factory = psycopg2.extras.DictCursor + self.assertEqual(self.conn.cursor_factory, psycopg2.extras.DictCursor) + cur = self.conn.cursor() + cur.execute("select 1 as a") + self.assertEqual(cur.fetchone()['a'], 1) + + self.conn.cursor_factory = None + self.assertEqual(self.conn.cursor_factory, None) + cur = self.conn.cursor() + cur.execute("select 1 as a") + self.assertRaises(TypeError, (lambda r: r['a']), cur.fetchone()) + + +class IsolationLevelsTestCase(ConnectingTestCase): + + def setUp(self): + ConnectingTestCase.setUp(self) + + conn = self.connect() + cur = conn.cursor() + try: + cur.execute("drop table isolevel;") + except psycopg2.ProgrammingError: + conn.rollback() + cur.execute("create table isolevel (id integer);") + conn.commit() + conn.close() + + def test_isolation_level(self): + conn = self.connect() + self.assertEqual( + conn.isolation_level, + psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) + + def test_encoding(self): + conn = self.connect() + self.assert_(conn.encoding in 
psycopg2.extensions.encodings) + + def test_set_isolation_level(self): + conn = self.connect() + curs = conn.cursor() + + levels = [ + (None, psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT), + ('read uncommitted', psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED), + ('read committed', psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED), + ('repeatable read', psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ), + ('serializable', psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE), + ] + for name, level in levels: + conn.set_isolation_level(level) + + # the only values available on prehistoric PG versions + if conn.server_version < 80000: + if level in ( + psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ): + name, level = levels[levels.index((name, level)) + 1] + + self.assertEqual(conn.isolation_level, level) + + curs.execute('show transaction_isolation;') + got_name = curs.fetchone()[0] + + if name is None: + curs.execute('show default_transaction_isolation;') + name = curs.fetchone()[0] + + self.assertEqual(name, got_name) + conn.commit() + + self.assertRaises(ValueError, conn.set_isolation_level, -1) + self.assertRaises(ValueError, conn.set_isolation_level, 5) + + def test_set_isolation_level_abort(self): + conn = self.connect() + cur = conn.cursor() + + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, + conn.get_transaction_status()) + cur.execute("insert into isolevel values (10);") + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_INTRANS, + conn.get_transaction_status()) + + conn.set_isolation_level( + psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, + conn.get_transaction_status()) + cur.execute("select count(*) from isolevel;") + self.assertEqual(0, cur.fetchone()[0]) + + cur.execute("insert into isolevel values (10);") + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_INTRANS, + 
conn.get_transaction_status()) + conn.set_isolation_level( + psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, + conn.get_transaction_status()) + cur.execute("select count(*) from isolevel;") + self.assertEqual(0, cur.fetchone()[0]) + + cur.execute("insert into isolevel values (10);") + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, + conn.get_transaction_status()) + conn.set_isolation_level( + psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) + self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, + conn.get_transaction_status()) + cur.execute("select count(*) from isolevel;") + self.assertEqual(1, cur.fetchone()[0]) + + def test_isolation_level_autocommit(self): + cnn1 = self.connect() + cnn2 = self.connect() + cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + + cur1 = cnn1.cursor() + cur1.execute("select count(*) from isolevel;") + self.assertEqual(0, cur1.fetchone()[0]) + cnn1.commit() + + cur2 = cnn2.cursor() + cur2.execute("insert into isolevel values (10);") + + cur1.execute("select count(*) from isolevel;") + self.assertEqual(1, cur1.fetchone()[0]) + + def test_isolation_level_read_committed(self): + cnn1 = self.connect() + cnn2 = self.connect() + cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) + + cur1 = cnn1.cursor() + cur1.execute("select count(*) from isolevel;") + self.assertEqual(0, cur1.fetchone()[0]) + cnn1.commit() + + cur2 = cnn2.cursor() + cur2.execute("insert into isolevel values (10);") + cur1.execute("insert into isolevel values (20);") + + cur2.execute("select count(*) from isolevel;") + self.assertEqual(1, cur2.fetchone()[0]) + cnn1.commit() + cur2.execute("select count(*) from isolevel;") + self.assertEqual(2, cur2.fetchone()[0]) + + cur1.execute("select count(*) from isolevel;") + self.assertEqual(1, cur1.fetchone()[0]) + cnn2.commit() + cur1.execute("select count(*) from isolevel;") + 
self.assertEqual(2, cur1.fetchone()[0]) + + def test_isolation_level_serializable(self): + cnn1 = self.connect() + cnn2 = self.connect() + cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) + + cur1 = cnn1.cursor() + cur1.execute("select count(*) from isolevel;") + self.assertEqual(0, cur1.fetchone()[0]) + cnn1.commit() + + cur2 = cnn2.cursor() + cur2.execute("insert into isolevel values (10);") + cur1.execute("insert into isolevel values (20);") + + cur2.execute("select count(*) from isolevel;") + self.assertEqual(1, cur2.fetchone()[0]) + cnn1.commit() + cur2.execute("select count(*) from isolevel;") + self.assertEqual(1, cur2.fetchone()[0]) + + cur1.execute("select count(*) from isolevel;") + self.assertEqual(1, cur1.fetchone()[0]) + cnn2.commit() + cur1.execute("select count(*) from isolevel;") + self.assertEqual(2, cur1.fetchone()[0]) + + cur2.execute("select count(*) from isolevel;") + self.assertEqual(2, cur2.fetchone()[0]) + + def test_isolation_level_closed(self): + cnn = self.connect() + cnn.close() + self.assertRaises(psycopg2.InterfaceError, getattr, + cnn, 'isolation_level') + self.assertRaises(psycopg2.InterfaceError, + cnn.set_isolation_level, 0) + self.assertRaises(psycopg2.InterfaceError, + cnn.set_isolation_level, 1) + + +class ConnectionTwoPhaseTests(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + + self.make_test_table() + self.clear_test_xacts() + + def tearDown(self): + self.clear_test_xacts() + ConnectingTestCase.tearDown(self) + + def clear_test_xacts(self): + """Rollback all the prepared transaction in the testing db.""" + cnn = self.connect() + cnn.set_isolation_level(0) + cur = cnn.cursor() + try: + cur.execute( + "select gid from pg_prepared_xacts where database = %s", + (dbname,)) + except psycopg2.ProgrammingError: + cnn.rollback() + cnn.close() + return + + gids = [ r[0] for r in cur ] + for gid in gids: + cur.execute("rollback prepared %s;", (gid,)) + cnn.close() + + def 
make_test_table(self): + cnn = self.connect() + cur = cnn.cursor() + try: + cur.execute("DROP TABLE test_tpc;") + except psycopg2.ProgrammingError: + cnn.rollback() + cur.execute("CREATE TABLE test_tpc (data text);") + cnn.commit() + cnn.close() + + def count_xacts(self): + """Return the number of prepared xacts currently in the test db.""" + cnn = self.connect() + cur = cnn.cursor() + cur.execute(""" + select count(*) from pg_prepared_xacts + where database = %s;""", + (dbname,)) + rv = cur.fetchone()[0] + cnn.close() + return rv + + def count_test_records(self): + """Return the number of records in the test table.""" + cnn = self.connect() + cur = cnn.cursor() + cur.execute("select count(*) from test_tpc;") + rv = cur.fetchone()[0] + cnn.close() + return rv + + def test_tpc_commit(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_commit');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_prepare() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_PREPARED) + self.assertEqual(1, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_commit() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(1, self.count_test_records()) + + def test_tpc_commit_one_phase(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_commit_1p');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + 
cnn.tpc_commit() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(1, self.count_test_records()) + + def test_tpc_commit_recovered(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_commit_rec');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_prepare() + cnn.close() + self.assertEqual(1, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + cnn.tpc_commit(xid) + + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(1, self.count_test_records()) + + def test_tpc_rollback(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_rollback');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_prepare() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_PREPARED) + self.assertEqual(1, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_rollback() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + def test_tpc_rollback_one_phase(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, 
psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_rollback_1p');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_rollback() + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + def test_tpc_rollback_recovered(self): + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + + cnn.tpc_begin(xid) + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) + + cur = cnn.cursor() + cur.execute("insert into test_tpc values ('test_tpc_commit_rec');") + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn.tpc_prepare() + cnn.close() + self.assertEqual(1, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + cnn = self.connect() + xid = cnn.xid(1, "gtrid", "bqual") + cnn.tpc_rollback(xid) + + self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(0, self.count_xacts()) + self.assertEqual(0, self.count_test_records()) + + def test_status_after_recover(self): + cnn = self.connect() + self.assertEqual(psycopg2.extensions.STATUS_READY, cnn.status) + xns = cnn.tpc_recover() + self.assertEqual(psycopg2.extensions.STATUS_READY, cnn.status) + + cur = cnn.cursor() + cur.execute("select 1") + self.assertEqual(psycopg2.extensions.STATUS_BEGIN, cnn.status) + xns = cnn.tpc_recover() + self.assertEqual(psycopg2.extensions.STATUS_BEGIN, cnn.status) + + def test_recovered_xids(self): + # insert a few test xns + cnn = self.connect() + cnn.set_isolation_level(0) + cur = cnn.cursor() + cur.execute("begin; prepare transaction '1-foo';") + cur.execute("begin; prepare transaction '2-bar';") + + # read the values to return + cur.execute(""" + select gid, prepared, owner, database + from pg_prepared_xacts + where 
database = %s;""", + (dbname,)) + okvals = cur.fetchall() + okvals.sort() + + cnn = self.connect() + xids = cnn.tpc_recover() + xids = [ xid for xid in xids if xid.database == dbname ] + xids.sort(key=attrgetter('gtrid')) + + # check the values returned + self.assertEqual(len(okvals), len(xids)) + for (xid, (gid, prepared, owner, database)) in zip (xids, okvals): + self.assertEqual(xid.gtrid, gid) + self.assertEqual(xid.prepared, prepared) + self.assertEqual(xid.owner, owner) + self.assertEqual(xid.database, database) + + def test_xid_encoding(self): + cnn = self.connect() + xid = cnn.xid(42, "gtrid", "bqual") + cnn.tpc_begin(xid) + cnn.tpc_prepare() + + cnn = self.connect() + cur = cnn.cursor() + cur.execute("select gid from pg_prepared_xacts where database = %s;", + (dbname,)) + self.assertEqual('42_Z3RyaWQ=_YnF1YWw=', cur.fetchone()[0]) + + def test_xid_roundtrip(self): + for fid, gtrid, bqual in [ + (0, "", ""), + (42, "gtrid", "bqual"), + (0x7fffffff, "x" * 64, "y" * 64), + ]: + cnn = self.connect() + xid = cnn.xid(fid, gtrid, bqual) + cnn.tpc_begin(xid) + cnn.tpc_prepare() + cnn.close() + + cnn = self.connect() + xids = [ xid for xid in cnn.tpc_recover() + if xid.database == dbname ] + self.assertEqual(1, len(xids)) + xid = xids[0] + self.assertEqual(xid.format_id, fid) + self.assertEqual(xid.gtrid, gtrid) + self.assertEqual(xid.bqual, bqual) + + cnn.tpc_rollback(xid) + + def test_unparsed_roundtrip(self): + for tid in [ + '', + 'hello, world!', + 'x' * 199, # PostgreSQL's limit in transaction id length + ]: + cnn = self.connect() + cnn.tpc_begin(tid) + cnn.tpc_prepare() + cnn.close() + + cnn = self.connect() + xids = [ xid for xid in cnn.tpc_recover() + if xid.database == dbname ] + self.assertEqual(1, len(xids)) + xid = xids[0] + self.assertEqual(xid.format_id, None) + self.assertEqual(xid.gtrid, tid) + self.assertEqual(xid.bqual, None) + + cnn.tpc_rollback(xid) + + def test_xid_construction(self): + from psycopg2.extensions import Xid + + x1 = Xid(74, 
'foo', 'bar') + self.assertEqual(74, x1.format_id) + self.assertEqual('foo', x1.gtrid) + self.assertEqual('bar', x1.bqual) + + def test_xid_from_string(self): + from psycopg2.extensions import Xid + + x2 = Xid.from_string('42_Z3RyaWQ=_YnF1YWw=') + self.assertEqual(42, x2.format_id) + self.assertEqual('gtrid', x2.gtrid) + self.assertEqual('bqual', x2.bqual) + + x3 = Xid.from_string('99_xxx_yyy') + self.assertEqual(None, x3.format_id) + self.assertEqual('99_xxx_yyy', x3.gtrid) + self.assertEqual(None, x3.bqual) + + def test_xid_to_string(self): + from psycopg2.extensions import Xid + + x1 = Xid.from_string('42_Z3RyaWQ=_YnF1YWw=') + self.assertEqual(str(x1), '42_Z3RyaWQ=_YnF1YWw=') + + x2 = Xid.from_string('99_xxx_yyy') + self.assertEqual(str(x2), '99_xxx_yyy') + + def test_xid_unicode(self): + cnn = self.connect() + x1 = cnn.xid(10, u'uni', u'code') + cnn.tpc_begin(x1) + cnn.tpc_prepare() + cnn.reset() + xid = [ xid for xid in cnn.tpc_recover() + if xid.database == dbname ][0] + self.assertEqual(10, xid.format_id) + self.assertEqual('uni', xid.gtrid) + self.assertEqual('code', xid.bqual) + + def test_xid_unicode_unparsed(self): + # We don't expect people shooting snowmen as transaction ids, + # so if something explodes in an encode error I don't mind. + # Let's just check unicode is accepted as a type. 
+ cnn = self.connect() + cnn.set_client_encoding('utf8') + cnn.tpc_begin(u"transaction-id") + cnn.tpc_prepare() + cnn.reset() + + xid = [ xid for xid in cnn.tpc_recover() + if xid.database == dbname ][0] + self.assertEqual(None, xid.format_id) + self.assertEqual('transaction-id', xid.gtrid) + self.assertEqual(None, xid.bqual) + + def test_cancel_fails_prepared(self): + cnn = self.connect() + cnn.tpc_begin('cancel') + cnn.tpc_prepare() + self.assertRaises(psycopg2.ProgrammingError, cnn.cancel) + + def test_tpc_recover_non_dbapi_connection(self): + from psycopg2.extras import RealDictConnection + cnn = self.connect(connection_factory=RealDictConnection) + cnn.tpc_begin('dict-connection') + cnn.tpc_prepare() + cnn.reset() + + xids = cnn.tpc_recover() + xid = [ xid for xid in xids if xid.database == dbname ][0] + self.assertEqual(None, xid.format_id) + self.assertEqual('dict-connection', xid.gtrid) + self.assertEqual(None, xid.bqual) + + +decorate_all_tests(ConnectionTwoPhaseTests, skip_if_tpc_disabled) + + +class TransactionControlTests(ConnectingTestCase): + def test_closed(self): + self.conn.close() + self.assertRaises(psycopg2.InterfaceError, + self.conn.set_session, + psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) + + def test_not_in_transaction(self): + cur = self.conn.cursor() + cur.execute("select 1") + self.assertRaises(psycopg2.ProgrammingError, + self.conn.set_session, + psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) + + def test_set_isolation_level(self): + cur = self.conn.cursor() + self.conn.set_session( + psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], 'serializable') + self.conn.rollback() + + self.conn.set_session( + psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ) + cur.execute("SHOW default_transaction_isolation;") + if self.conn.server_version > 80000: + self.assertEqual(cur.fetchone()[0], 'repeatable read') + else: + 
self.assertEqual(cur.fetchone()[0], 'serializable') + self.conn.rollback() + + self.conn.set_session( + isolation_level=psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], 'read committed') + self.conn.rollback() + + self.conn.set_session( + isolation_level=psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED) + cur.execute("SHOW default_transaction_isolation;") + if self.conn.server_version > 80000: + self.assertEqual(cur.fetchone()[0], 'read uncommitted') + else: + self.assertEqual(cur.fetchone()[0], 'read committed') + self.conn.rollback() + + def test_set_isolation_level_str(self): + cur = self.conn.cursor() + self.conn.set_session("serializable") + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], 'serializable') + self.conn.rollback() + + self.conn.set_session("repeatable read") + cur.execute("SHOW default_transaction_isolation;") + if self.conn.server_version > 80000: + self.assertEqual(cur.fetchone()[0], 'repeatable read') + else: + self.assertEqual(cur.fetchone()[0], 'serializable') + self.conn.rollback() + + self.conn.set_session("read committed") + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], 'read committed') + self.conn.rollback() + + self.conn.set_session("read uncommitted") + cur.execute("SHOW default_transaction_isolation;") + if self.conn.server_version > 80000: + self.assertEqual(cur.fetchone()[0], 'read uncommitted') + else: + self.assertEqual(cur.fetchone()[0], 'read committed') + self.conn.rollback() + + def test_bad_isolation_level(self): + self.assertRaises(ValueError, self.conn.set_session, 0) + self.assertRaises(ValueError, self.conn.set_session, 5) + self.assertRaises(ValueError, self.conn.set_session, 'whatever') + + def test_set_read_only(self): + cur = self.conn.cursor() + self.conn.set_session(readonly=True) + cur.execute("SHOW default_transaction_read_only;") + 
self.assertEqual(cur.fetchone()[0], 'on') + self.conn.rollback() + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'on') + self.conn.rollback() + + cur = self.conn.cursor() + self.conn.set_session(readonly=None) + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'on') + self.conn.rollback() + + self.conn.set_session(readonly=False) + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'off') + self.conn.rollback() + + def test_set_default(self): + cur = self.conn.cursor() + cur.execute("SHOW default_transaction_isolation;") + default_isolevel = cur.fetchone()[0] + cur.execute("SHOW default_transaction_read_only;") + default_readonly = cur.fetchone()[0] + self.conn.rollback() + + self.conn.set_session(isolation_level='serializable', readonly=True) + self.conn.set_session(isolation_level='default', readonly='default') + + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], default_isolevel) + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], default_readonly) + + @skip_before_postgres(9, 1) + def test_set_deferrable(self): + cur = self.conn.cursor() + self.conn.set_session(readonly=True, deferrable=True) + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'on') + cur.execute("SHOW default_transaction_deferrable;") + self.assertEqual(cur.fetchone()[0], 'on') + self.conn.rollback() + cur.execute("SHOW default_transaction_deferrable;") + self.assertEqual(cur.fetchone()[0], 'on') + self.conn.rollback() + + self.conn.set_session(deferrable=False) + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'on') + cur.execute("SHOW default_transaction_deferrable;") + self.assertEqual(cur.fetchone()[0], 'off') + self.conn.rollback() + + @skip_after_postgres(9, 1) + def test_set_deferrable_error(self): + 
self.assertRaises(psycopg2.ProgrammingError, + self.conn.set_session, readonly=True, deferrable=True) + + +class AutocommitTests(ConnectingTestCase): + def test_closed(self): + self.conn.close() + self.assertRaises(psycopg2.InterfaceError, + setattr, self.conn, 'autocommit', True) + + # The getter doesn't have a guard. We may change this in future + # to make it consistent with other methods; meanwhile let's just check + # it doesn't explode. + try: + self.assert_(self.conn.autocommit in (True, False)) + except psycopg2.InterfaceError: + pass + + def test_default_no_autocommit(self): + self.assert_(not self.conn.autocommit) + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + cur = self.conn.cursor() + cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_INTRANS) + + self.conn.rollback() + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + def test_set_autocommit(self): + self.conn.autocommit = True + self.assert_(self.conn.autocommit) + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + cur = self.conn.cursor() + cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + self.conn.autocommit = False + self.assert_(not self.conn.autocommit) + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + 
cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_INTRANS) + + def test_set_intrans_error(self): + cur = self.conn.cursor() + cur.execute('select 1;') + self.assertRaises(psycopg2.ProgrammingError, + setattr, self.conn, 'autocommit', True) + + def test_set_session_autocommit(self): + self.conn.set_session(autocommit=True) + self.assert_(self.conn.autocommit) + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + cur = self.conn.cursor() + cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + self.conn.set_session(autocommit=False) + self.assert_(not self.conn.autocommit) + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + + cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_INTRANS) + self.conn.rollback() + + self.conn.set_session('serializable', readonly=True, autocommit=True) + self.assert_(self.conn.autocommit) + cur.execute('select 1;') + self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) + self.assertEqual(self.conn.get_transaction_status(), + psycopg2.extensions.TRANSACTION_STATUS_IDLE) + cur.execute("SHOW default_transaction_isolation;") + self.assertEqual(cur.fetchone()[0], 'serializable') + cur.execute("SHOW default_transaction_read_only;") + self.assertEqual(cur.fetchone()[0], 'on') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if 
__name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_copy.py b/psycopg2/tests/test_copy.py new file mode 100644 index 0000000..7764be6 --- /dev/null +++ b/psycopg2/tests/test_copy.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python + +# test_copy.py - unit test for COPY support +# +# Copyright (C) 2010-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import sys +import string +from testutils import unittest, ConnectingTestCase, decorate_all_tests +from testutils import skip_if_no_iobase +from cStringIO import StringIO +from itertools import cycle, izip + +import psycopg2 +import psycopg2.extensions +from testutils import skip_copy_if_green + +if sys.version_info[0] < 3: + _base = object +else: + from io import TextIOBase as _base + +class MinimalRead(_base): + """A file wrapper exposing the minimal interface to copy from.""" + def __init__(self, f): + self.f = f + + def read(self, size): + return self.f.read(size) + + def readline(self): + return self.f.readline() + +class MinimalWrite(_base): + """A file wrapper exposing the minimal interface to copy to.""" + def __init__(self, f): + self.f = f + + def write(self, data): + return self.f.write(data) + + +class CopyTests(ConnectingTestCase): + + def setUp(self): + ConnectingTestCase.setUp(self) + self._create_temp_table() + + def _create_temp_table(self): + curs = self.conn.cursor() + curs.execute(''' + CREATE TEMPORARY TABLE tcopy ( + id serial PRIMARY KEY, + data text + )''') + + def test_copy_from(self): + curs = self.conn.cursor() + try: + self._copy_from(curs, nrecs=1024, srec=10*1024, copykw={}) + finally: + curs.close() + + def test_copy_from_insane_size(self): + # Trying to trigger a "would block" error + curs = self.conn.cursor() + try: + self._copy_from(curs, nrecs=10*1024, srec=10*1024, + copykw={'size': 20*1024*1024}) + finally: + curs.close() + + def test_copy_from_cols(self): + curs = self.conn.cursor() + f = StringIO() + for i in xrange(10): + f.write("%s\n" % (i,)) + + f.seek(0) + curs.copy_from(MinimalRead(f), "tcopy", columns=['id']) + + curs.execute("select * from tcopy order by id") + self.assertEqual([(i, None) for i in range(10)], curs.fetchall()) + + def test_copy_from_cols_err(self): + curs = self.conn.cursor() + f = StringIO() + for i in xrange(10): + f.write("%s\n" % (i,)) + + f.seek(0) + def cols(): + raise ZeroDivisionError() + 
yield 'id' + + self.assertRaises(ZeroDivisionError, + curs.copy_from, MinimalRead(f), "tcopy", columns=cols()) + + def test_copy_to(self): + curs = self.conn.cursor() + try: + self._copy_from(curs, nrecs=1024, srec=10*1024, copykw={}) + self._copy_to(curs, srec=10*1024) + finally: + curs.close() + + @skip_if_no_iobase + def test_copy_text(self): + self.conn.set_client_encoding('latin1') + self._create_temp_table() # the above call closed the xn + + if sys.version_info[0] < 3: + abin = ''.join(map(chr, range(32, 127) + range(160, 256))) + about = abin.decode('latin1').replace('\\', '\\\\') + + else: + abin = bytes(range(32, 127) + range(160, 256)).decode('latin1') + about = abin.replace('\\', '\\\\') + + curs = self.conn.cursor() + curs.execute('insert into tcopy values (%s, %s)', + (42, abin)) + + import io + f = io.StringIO() + curs.copy_to(f, 'tcopy', columns=('data',)) + f.seek(0) + self.assertEqual(f.readline().rstrip(), about) + + @skip_if_no_iobase + def test_copy_bytes(self): + self.conn.set_client_encoding('latin1') + self._create_temp_table() # the above call closed the xn + + if sys.version_info[0] < 3: + abin = ''.join(map(chr, range(32, 127) + range(160, 255))) + about = abin.replace('\\', '\\\\') + else: + abin = bytes(range(32, 127) + range(160, 255)).decode('latin1') + about = abin.replace('\\', '\\\\').encode('latin1') + + curs = self.conn.cursor() + curs.execute('insert into tcopy values (%s, %s)', + (42, abin)) + + import io + f = io.BytesIO() + curs.copy_to(f, 'tcopy', columns=('data',)) + f.seek(0) + self.assertEqual(f.readline().rstrip(), about) + + @skip_if_no_iobase + def test_copy_expert_textiobase(self): + self.conn.set_client_encoding('latin1') + self._create_temp_table() # the above call closed the xn + + if sys.version_info[0] < 3: + abin = ''.join(map(chr, range(32, 127) + range(160, 256))) + abin = abin.decode('latin1') + about = abin.replace('\\', '\\\\') + + else: + abin = bytes(range(32, 127) + range(160, 256)).decode('latin1') + 
about = abin.replace('\\', '\\\\') + + import io + f = io.StringIO() + f.write(about) + f.seek(0) + + curs = self.conn.cursor() + psycopg2.extensions.register_type( + psycopg2.extensions.UNICODE, curs) + + curs.copy_expert('COPY tcopy (data) FROM STDIN', f) + curs.execute("select data from tcopy;") + self.assertEqual(curs.fetchone()[0], abin) + + f = io.StringIO() + curs.copy_expert('COPY tcopy (data) TO STDOUT', f) + f.seek(0) + self.assertEqual(f.readline().rstrip(), about) + + # same tests with setting size + f = io.StringIO() + f.write(about) + f.seek(0) + exp_size = 123 + # hack here to leave file as is, only check size when reading + real_read = f.read + def read(_size, f=f, exp_size=exp_size): + self.assertEqual(_size, exp_size) + return real_read(_size) + f.read = read + curs.copy_expert('COPY tcopy (data) FROM STDIN', f, size=exp_size) + curs.execute("select data from tcopy;") + self.assertEqual(curs.fetchone()[0], abin) + + def _copy_from(self, curs, nrecs, srec, copykw): + f = StringIO() + for i, c in izip(xrange(nrecs), cycle(string.ascii_letters)): + l = c * srec + f.write("%s\t%s\n" % (i,l)) + + f.seek(0) + curs.copy_from(MinimalRead(f), "tcopy", **copykw) + + curs.execute("select count(*) from tcopy") + self.assertEqual(nrecs, curs.fetchone()[0]) + + curs.execute("select data from tcopy where id < %s order by id", + (len(string.ascii_letters),)) + for i, (l,) in enumerate(curs): + self.assertEqual(l, string.ascii_letters[i] * srec) + + def _copy_to(self, curs, srec): + f = StringIO() + curs.copy_to(MinimalWrite(f), "tcopy") + + f.seek(0) + ntests = 0 + for line in f: + n, s = line.split() + if int(n) < len(string.ascii_letters): + self.assertEqual(s, string.ascii_letters[int(n)] * srec) + ntests += 1 + + self.assertEqual(ntests, len(string.ascii_letters)) + + def test_copy_expert_file_refcount(self): + class Whatever(object): + pass + + f = Whatever() + curs = self.conn.cursor() + self.assertRaises(TypeError, + curs.copy_expert, 'COPY tcopy (data) 
FROM STDIN', f) + + def test_copy_no_column_limit(self): + cols = [ "c%050d" % i for i in range(200) ] + + curs = self.conn.cursor() + curs.execute('CREATE TEMPORARY TABLE manycols (%s)' % ',\n'.join( + [ "%s int" % c for c in cols])) + curs.execute("INSERT INTO manycols DEFAULT VALUES") + + f = StringIO() + curs.copy_to(f, "manycols", columns = cols) + f.seek(0) + self.assertEqual(f.read().split(), ['\\N'] * len(cols)) + + f.seek(0) + curs.copy_from(f, "manycols", columns = cols) + curs.execute("select count(*) from manycols;") + self.assertEqual(curs.fetchone()[0], 2) + + +decorate_all_tests(CopyTests, skip_copy_if_green) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_cursor.py b/psycopg2/tests/test_cursor.py new file mode 100644 index 0000000..c35d26c --- /dev/null +++ b/psycopg2/tests/test_cursor.py @@ -0,0 +1,421 @@ +#!/usr/bin/env python + +# test_cursor.py - unit test for cursor attributes +# +# Copyright (C) 2010-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import time +import psycopg2 +import psycopg2.extensions +from psycopg2.extensions import b +from testutils import unittest, ConnectingTestCase, skip_before_postgres +from testutils import skip_if_no_namedtuple, skip_if_no_getrefcount + +class CursorTests(ConnectingTestCase): + + def test_close_idempotent(self): + cur = self.conn.cursor() + cur.close() + cur.close() + self.assert_(cur.closed) + + def test_empty_query(self): + cur = self.conn.cursor() + self.assertRaises(psycopg2.ProgrammingError, cur.execute, "") + self.assertRaises(psycopg2.ProgrammingError, cur.execute, " ") + self.assertRaises(psycopg2.ProgrammingError, cur.execute, ";") + + def test_executemany_propagate_exceptions(self): + conn = self.conn + cur = conn.cursor() + cur.execute("create temp table test_exc (data int);") + def buggygen(): + yield 1//0 + self.assertRaises(ZeroDivisionError, + cur.executemany, "insert into test_exc values (%s)", buggygen()) + cur.close() + + def test_mogrify_unicode(self): + conn = self.conn + cur = conn.cursor() + + # test consistency between execute and mogrify. 
+ + # unicode query containing only ascii data + cur.execute(u"SELECT 'foo';") + self.assertEqual('foo', cur.fetchone()[0]) + self.assertEqual(b("SELECT 'foo';"), cur.mogrify(u"SELECT 'foo';")) + + conn.set_client_encoding('UTF8') + snowman = u"\u2603" + + # unicode query with non-ascii data + cur.execute(u"SELECT '%s';" % snowman) + self.assertEqual(snowman.encode('utf8'), b(cur.fetchone()[0])) + self.assertEqual(("SELECT '%s';" % snowman).encode('utf8'), + cur.mogrify(u"SELECT '%s';" % snowman).replace(b("E'"), b("'"))) + + # unicode args + cur.execute("SELECT %s;", (snowman,)) + self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0])) + self.assertEqual(("SELECT '%s';" % snowman).encode('utf8'), + cur.mogrify("SELECT %s;", (snowman,)).replace(b("E'"), b("'"))) + + # unicode query and args + cur.execute(u"SELECT %s;", (snowman,)) + self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0])) + self.assertEqual(("SELECT '%s';" % snowman).encode('utf8'), + cur.mogrify(u"SELECT %s;", (snowman,)).replace(b("E'"), b("'"))) + + def test_mogrify_decimal_explodes(self): + # issue #7: explodes on windows with python 2.5 and psycopg 2.2.2 + try: + from decimal import Decimal + except: + return + + conn = self.conn + cur = conn.cursor() + self.assertEqual(b('SELECT 10.3;'), + cur.mogrify("SELECT %s;", (Decimal("10.3"),))) + + @skip_if_no_getrefcount + def test_mogrify_leak_on_multiple_reference(self): + # issue #81: reference leak when a parameter value is referenced + # more than once from a dict. 
+ cur = self.conn.cursor() + i = lambda x: x + foo = i('foo') * 10 + import sys + nref1 = sys.getrefcount(foo) + cur.mogrify("select %(foo)s, %(foo)s, %(foo)s", {'foo': foo}) + nref2 = sys.getrefcount(foo) + self.assertEqual(nref1, nref2) + + def test_bad_placeholder(self): + cur = self.conn.cursor() + self.assertRaises(psycopg2.ProgrammingError, + cur.mogrify, "select %(foo", {}) + self.assertRaises(psycopg2.ProgrammingError, + cur.mogrify, "select %(foo", {'foo': 1}) + self.assertRaises(psycopg2.ProgrammingError, + cur.mogrify, "select %(foo, %(bar)", {'foo': 1}) + self.assertRaises(psycopg2.ProgrammingError, + cur.mogrify, "select %(foo, %(bar)", {'foo': 1, 'bar': 2}) + + def test_cast(self): + curs = self.conn.cursor() + + self.assertEqual(42, curs.cast(20, '42')) + self.assertAlmostEqual(3.14, curs.cast(700, '3.14')) + + try: + from decimal import Decimal + except ImportError: + self.assertAlmostEqual(123.45, curs.cast(1700, '123.45')) + else: + self.assertEqual(Decimal('123.45'), curs.cast(1700, '123.45')) + + from datetime import date + self.assertEqual(date(2011,1,2), curs.cast(1082, '2011-01-02')) + self.assertEqual("who am i?", curs.cast(705, 'who am i?')) # unknown + + def test_cast_specificity(self): + curs = self.conn.cursor() + self.assertEqual("foo", curs.cast(705, 'foo')) + + D = psycopg2.extensions.new_type((705,), "DOUBLING", lambda v, c: v * 2) + psycopg2.extensions.register_type(D, self.conn) + self.assertEqual("foofoo", curs.cast(705, 'foo')) + + T = psycopg2.extensions.new_type((705,), "TREBLING", lambda v, c: v * 3) + psycopg2.extensions.register_type(T, curs) + self.assertEqual("foofoofoo", curs.cast(705, 'foo')) + + curs2 = self.conn.cursor() + self.assertEqual("foofoo", curs2.cast(705, 'foo')) + + def test_weakref(self): + from weakref import ref + curs = self.conn.cursor() + w = ref(curs) + del curs + import gc; gc.collect() + self.assert_(w() is None) + + def test_null_name(self): + curs = self.conn.cursor(None) + 
self.assertEqual(curs.name, None) + + def test_invalid_name(self): + curs = self.conn.cursor() + curs.execute("create temp table invname (data int);") + for i in (10,20,30): + curs.execute("insert into invname values (%s)", (i,)) + curs.close() + + curs = self.conn.cursor(r'1-2-3 \ "test"') + curs.execute("select data from invname order by data") + self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)]) + + def test_withhold(self): + self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor, + withhold=True) + + curs = self.conn.cursor() + try: + curs.execute("drop table withhold") + except psycopg2.ProgrammingError: + self.conn.rollback() + curs.execute("create table withhold (data int)") + for i in (10, 20, 30): + curs.execute("insert into withhold values (%s)", (i,)) + curs.close() + + curs = self.conn.cursor("W") + self.assertEqual(curs.withhold, False); + curs.withhold = True + self.assertEqual(curs.withhold, True); + curs.execute("select data from withhold order by data") + self.conn.commit() + self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)]) + curs.close() + + curs = self.conn.cursor("W", withhold=True) + self.assertEqual(curs.withhold, True); + curs.execute("select data from withhold order by data") + self.conn.commit() + self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)]) + + curs = self.conn.cursor() + curs.execute("drop table withhold") + self.conn.commit() + + def test_scrollable(self): + self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor, + scrollable=True) + + curs = self.conn.cursor() + curs.execute("create table scrollable (data int)") + curs.executemany("insert into scrollable values (%s)", + [ (i,) for i in range(100) ]) + curs.close() + + for t in range(2): + if not t: + curs = self.conn.cursor("S") + self.assertEqual(curs.scrollable, None); + curs.scrollable = True + else: + curs = self.conn.cursor("S", scrollable=True) + + self.assertEqual(curs.scrollable, True); + curs.itersize = 10 + + # complex enough to make 
postgres cursors declare without + # scroll/no scroll to fail + curs.execute(""" + select x.data + from scrollable x + join scrollable y on x.data = y.data + order by y.data""") + for i, (n,) in enumerate(curs): + self.assertEqual(i, n) + + curs.scroll(-1) + for i in range(99, -1, -1): + curs.scroll(-1) + self.assertEqual(i, curs.fetchone()[0]) + curs.scroll(-1) + + curs.close() + + def test_not_scrollable(self): + self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor, + scrollable=False) + + curs = self.conn.cursor() + curs.execute("create table scrollable (data int)") + curs.executemany("insert into scrollable values (%s)", + [ (i,) for i in range(100) ]) + curs.close() + + curs = self.conn.cursor("S") # default scrollability + curs.execute("select * from scrollable") + self.assertEqual(curs.scrollable, None) + curs.scroll(2) + try: + curs.scroll(-1) + except psycopg2.OperationalError: + return self.skipTest("can't evaluate non-scrollable cursor") + curs.close() + + curs = self.conn.cursor("S", scrollable=False) + self.assertEqual(curs.scrollable, False) + curs.execute("select * from scrollable") + curs.scroll(2) + self.assertRaises(psycopg2.OperationalError, curs.scroll, -1) + + @skip_before_postgres(8, 2) + def test_iter_named_cursor_efficient(self): + curs = self.conn.cursor('tmp') + # if these records are fetched in the same roundtrip their + # timestamp will not be influenced by the pause in Python world. 
+ curs.execute("""select clock_timestamp() from generate_series(1,2)""") + i = iter(curs) + t1 = (i.next())[0] # the brackets work around a 2to3 bug + time.sleep(0.2) + t2 = (i.next())[0] + self.assert_((t2 - t1).microseconds * 1e-6 < 0.1, + "named cursor records fetched in 2 roundtrips (delta: %s)" + % (t2 - t1)) + + @skip_before_postgres(8, 0) + def test_iter_named_cursor_default_itersize(self): + curs = self.conn.cursor('tmp') + curs.execute('select generate_series(1,50)') + rv = [ (r[0], curs.rownumber) for r in curs ] + # everything swallowed in one gulp + self.assertEqual(rv, [(i,i) for i in range(1,51)]) + + @skip_before_postgres(8, 0) + def test_iter_named_cursor_itersize(self): + curs = self.conn.cursor('tmp') + curs.itersize = 30 + curs.execute('select generate_series(1,50)') + rv = [ (r[0], curs.rownumber) for r in curs ] + # everything swallowed in two gulps + self.assertEqual(rv, [(i,((i - 1) % 30) + 1) for i in range(1,51)]) + + @skip_before_postgres(8, 0) + def test_iter_named_cursor_rownumber(self): + curs = self.conn.cursor('tmp') + # note: this fails if itersize < dataset: internally we check + # rownumber == rowcount to detect when to read another page, so we + # would need an extra attribute to have a monotonic rownumber. 
+ curs.itersize = 20 + curs.execute('select generate_series(1,10)') + for i, rec in enumerate(curs): + self.assertEqual(i + 1, curs.rownumber) + + @skip_if_no_namedtuple + def test_namedtuple_description(self): + curs = self.conn.cursor() + curs.execute("""select + 3.14::decimal(10,2) as pi, + 'hello'::text as hi, + '2010-02-18'::date as now; + """) + self.assertEqual(len(curs.description), 3) + for c in curs.description: + self.assertEqual(len(c), 7) # DBAPI happy + for a in ('name', 'type_code', 'display_size', 'internal_size', + 'precision', 'scale', 'null_ok'): + self.assert_(hasattr(c, a), a) + + c = curs.description[0] + self.assertEqual(c.name, 'pi') + self.assert_(c.type_code in psycopg2.extensions.DECIMAL.values) + self.assert_(c.internal_size > 0) + self.assertEqual(c.precision, 10) + self.assertEqual(c.scale, 2) + + c = curs.description[1] + self.assertEqual(c.name, 'hi') + self.assert_(c.type_code in psycopg2.STRING.values) + self.assert_(c.internal_size < 0) + self.assertEqual(c.precision, None) + self.assertEqual(c.scale, None) + + c = curs.description[2] + self.assertEqual(c.name, 'now') + self.assert_(c.type_code in psycopg2.extensions.DATE.values) + self.assert_(c.internal_size > 0) + self.assertEqual(c.precision, None) + self.assertEqual(c.scale, None) + + @skip_before_postgres(8, 0) + def test_named_cursor_stealing(self): + # you can use a named cursor to iterate on a refcursor created + # somewhere else + cur1 = self.conn.cursor() + cur1.execute("DECLARE test CURSOR WITHOUT HOLD " + " FOR SELECT generate_series(1,7)") + + cur2 = self.conn.cursor('test') + # can call fetch without execute + self.assertEqual((1,), cur2.fetchone()) + self.assertEqual([(2,), (3,), (4,)], cur2.fetchmany(3)) + self.assertEqual([(5,), (6,), (7,)], cur2.fetchall()) + + @skip_before_postgres(8, 0) + def test_scroll(self): + cur = self.conn.cursor() + cur.execute("select generate_series(0,9)") + cur.scroll(2) + self.assertEqual(cur.fetchone(), (2,)) + cur.scroll(2) + 
self.assertEqual(cur.fetchone(), (5,)) + cur.scroll(2, mode='relative') + self.assertEqual(cur.fetchone(), (8,)) + cur.scroll(-1) + self.assertEqual(cur.fetchone(), (8,)) + cur.scroll(-2) + self.assertEqual(cur.fetchone(), (7,)) + cur.scroll(2, mode='absolute') + self.assertEqual(cur.fetchone(), (2,)) + + # on the boundary + cur.scroll(0, mode='absolute') + self.assertEqual(cur.fetchone(), (0,)) + self.assertRaises((IndexError, psycopg2.ProgrammingError), + cur.scroll, -1, mode='absolute') + cur.scroll(0, mode='absolute') + self.assertRaises((IndexError, psycopg2.ProgrammingError), + cur.scroll, -1) + + cur.scroll(9, mode='absolute') + self.assertEqual(cur.fetchone(), (9,)) + self.assertRaises((IndexError, psycopg2.ProgrammingError), + cur.scroll, 10, mode='absolute') + cur.scroll(9, mode='absolute') + self.assertRaises((IndexError, psycopg2.ProgrammingError), + cur.scroll, 1) + + @skip_before_postgres(8, 0) + def test_scroll_named(self): + cur = self.conn.cursor('tmp', scrollable=True) + cur.execute("select generate_series(0,9)") + cur.scroll(2) + self.assertEqual(cur.fetchone(), (2,)) + cur.scroll(2) + self.assertEqual(cur.fetchone(), (5,)) + cur.scroll(2, mode='relative') + self.assertEqual(cur.fetchone(), (8,)) + cur.scroll(9, mode='absolute') + self.assertEqual(cur.fetchone(), (9,)) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_dates.py b/psycopg2/tests/test_dates.py new file mode 100644 index 0000000..24c4a9a --- /dev/null +++ b/psycopg2/tests/test_dates.py @@ -0,0 +1,569 @@ +#!/usr/bin/env python + +# test_dates.py - unit test for dates handling +# +# Copyright (C) 2008-2011 James Henstridge +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later 
version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import math +import psycopg2 +from psycopg2.tz import FixedOffsetTimezone, ZERO +from testutils import unittest, ConnectingTestCase + +class CommonDatetimeTestsMixin: + + def execute(self, *args): + self.curs.execute(*args) + return self.curs.fetchone()[0] + + def test_parse_date(self): + value = self.DATE('2007-01-01', self.curs) + self.assert_(value is not None) + self.assertEqual(value.year, 2007) + self.assertEqual(value.month, 1) + self.assertEqual(value.day, 1) + + def test_parse_null_date(self): + value = self.DATE(None, self.curs) + self.assertEqual(value, None) + + def test_parse_incomplete_date(self): + self.assertRaises(psycopg2.DataError, self.DATE, '2007', self.curs) + self.assertRaises(psycopg2.DataError, self.DATE, '2007-01', self.curs) + + def test_parse_time(self): + value = self.TIME('13:30:29', self.curs) + self.assert_(value is not None) + self.assertEqual(value.hour, 13) + self.assertEqual(value.minute, 30) + self.assertEqual(value.second, 29) + + def test_parse_null_time(self): + value = self.TIME(None, self.curs) + self.assertEqual(value, None) + + def test_parse_incomplete_time(self): + self.assertRaises(psycopg2.DataError, self.TIME, '13', self.curs) + self.assertRaises(psycopg2.DataError, self.TIME, '13:30', self.curs) + + def test_parse_datetime(self): + value = self.DATETIME('2007-01-01 13:30:29', 
self.curs) + self.assert_(value is not None) + self.assertEqual(value.year, 2007) + self.assertEqual(value.month, 1) + self.assertEqual(value.day, 1) + self.assertEqual(value.hour, 13) + self.assertEqual(value.minute, 30) + self.assertEqual(value.second, 29) + + def test_parse_null_datetime(self): + value = self.DATETIME(None, self.curs) + self.assertEqual(value, None) + + def test_parse_incomplete_datetime(self): + self.assertRaises(psycopg2.DataError, + self.DATETIME, '2007', self.curs) + self.assertRaises(psycopg2.DataError, + self.DATETIME, '2007-01', self.curs) + self.assertRaises(psycopg2.DataError, + self.DATETIME, '2007-01-01 13', self.curs) + self.assertRaises(psycopg2.DataError, + self.DATETIME, '2007-01-01 13:30', self.curs) + + def test_parse_null_interval(self): + value = self.INTERVAL(None, self.curs) + self.assertEqual(value, None) + + +class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin): + """Tests for the datetime based date handling in psycopg2.""" + + def setUp(self): + ConnectingTestCase.setUp(self) + self.curs = self.conn.cursor() + self.DATE = psycopg2.extensions.PYDATE + self.TIME = psycopg2.extensions.PYTIME + self.DATETIME = psycopg2.extensions.PYDATETIME + self.INTERVAL = psycopg2.extensions.PYINTERVAL + + def test_parse_bc_date(self): + # datetime does not support BC dates + self.assertRaises(ValueError, self.DATE, '00042-01-01 BC', self.curs) + + def test_parse_bc_datetime(self): + # datetime does not support BC dates + self.assertRaises(ValueError, self.DATETIME, + '00042-01-01 13:30:29 BC', self.curs) + + def test_parse_time_microseconds(self): + value = self.TIME('13:30:29.123456', self.curs) + self.assertEqual(value.second, 29) + self.assertEqual(value.microsecond, 123456) + + def test_parse_datetime_microseconds(self): + value = self.DATETIME('2007-01-01 13:30:29.123456', self.curs) + self.assertEqual(value.second, 29) + self.assertEqual(value.microsecond, 123456) + + def check_time_tz(self, str_offset, offset): + from 
datetime import time, timedelta + base = time(13, 30, 29) + base_str = '13:30:29' + + value = self.TIME(base_str + str_offset, self.curs) + + # Value has time zone info and correct UTC offset. + self.assertNotEqual(value.tzinfo, None), + self.assertEqual(value.utcoffset(), timedelta(seconds=offset)) + + # Time portion is correct. + self.assertEqual(value.replace(tzinfo=None), base) + + def test_parse_time_timezone(self): + self.check_time_tz("+01", 3600) + self.check_time_tz("-01", -3600) + self.check_time_tz("+01:15", 4500) + self.check_time_tz("-01:15", -4500) + # The Python datetime module does not support time zone + # offsets that are not a whole number of minutes. + # We round the offset to the nearest minute. + self.check_time_tz("+01:15:00", 60 * (60 + 15)) + self.check_time_tz("+01:15:29", 60 * (60 + 15)) + self.check_time_tz("+01:15:30", 60 * (60 + 16)) + self.check_time_tz("+01:15:59", 60 * (60 + 16)) + self.check_time_tz("-01:15:00", -60 * (60 + 15)) + self.check_time_tz("-01:15:29", -60 * (60 + 15)) + self.check_time_tz("-01:15:30", -60 * (60 + 16)) + self.check_time_tz("-01:15:59", -60 * (60 + 16)) + + def check_datetime_tz(self, str_offset, offset): + from datetime import datetime, timedelta + base = datetime(2007, 1, 1, 13, 30, 29) + base_str = '2007-01-01 13:30:29' + + value = self.DATETIME(base_str + str_offset, self.curs) + + # Value has time zone info and correct UTC offset. + self.assertNotEqual(value.tzinfo, None), + self.assertEqual(value.utcoffset(), timedelta(seconds=offset)) + + # Datetime is correct. + self.assertEqual(value.replace(tzinfo=None), base) + + # Conversion to UTC produces the expected offset. 
+ UTC = FixedOffsetTimezone(0, "UTC") + value_utc = value.astimezone(UTC).replace(tzinfo=None) + self.assertEqual(base - value_utc, timedelta(seconds=offset)) + + def test_parse_datetime_timezone(self): + self.check_datetime_tz("+01", 3600) + self.check_datetime_tz("-01", -3600) + self.check_datetime_tz("+01:15", 4500) + self.check_datetime_tz("-01:15", -4500) + # The Python datetime module does not support time zone + # offsets that are not a whole number of minutes. + # We round the offset to the nearest minute. + self.check_datetime_tz("+01:15:00", 60 * (60 + 15)) + self.check_datetime_tz("+01:15:29", 60 * (60 + 15)) + self.check_datetime_tz("+01:15:30", 60 * (60 + 16)) + self.check_datetime_tz("+01:15:59", 60 * (60 + 16)) + self.check_datetime_tz("-01:15:00", -60 * (60 + 15)) + self.check_datetime_tz("-01:15:29", -60 * (60 + 15)) + self.check_datetime_tz("-01:15:30", -60 * (60 + 16)) + self.check_datetime_tz("-01:15:59", -60 * (60 + 16)) + + def test_parse_time_no_timezone(self): + self.assertEqual(self.TIME("13:30:29", self.curs).tzinfo, None) + self.assertEqual(self.TIME("13:30:29.123456", self.curs).tzinfo, None) + + def test_parse_datetime_no_timezone(self): + self.assertEqual( + self.DATETIME("2007-01-01 13:30:29", self.curs).tzinfo, None) + self.assertEqual( + self.DATETIME("2007-01-01 13:30:29.123456", self.curs).tzinfo, None) + + def test_parse_interval(self): + value = self.INTERVAL('42 days 12:34:56.123456', self.curs) + self.assertNotEqual(value, None) + self.assertEqual(value.days, 42) + self.assertEqual(value.seconds, 45296) + self.assertEqual(value.microseconds, 123456) + + def test_parse_negative_interval(self): + value = self.INTERVAL('-42 days -12:34:56.123456', self.curs) + self.assertNotEqual(value, None) + self.assertEqual(value.days, -43) + self.assertEqual(value.seconds, 41103) + self.assertEqual(value.microseconds, 876544) + + def test_parse_infinity(self): + value = self.DATETIME('-infinity', self.curs) + self.assertEqual(str(value), 
'0001-01-01 00:00:00') + value = self.DATETIME('infinity', self.curs) + self.assertEqual(str(value), '9999-12-31 23:59:59.999999') + value = self.DATE('infinity', self.curs) + self.assertEqual(str(value), '9999-12-31') + + def test_adapt_date(self): + from datetime import date + value = self.execute('select (%s)::date::text', + [date(2007, 1, 1)]) + self.assertEqual(value, '2007-01-01') + + def test_adapt_time(self): + from datetime import time + value = self.execute('select (%s)::time::text', + [time(13, 30, 29)]) + self.assertEqual(value, '13:30:29') + + def test_adapt_datetime(self): + from datetime import datetime + value = self.execute('select (%s)::timestamp::text', + [datetime(2007, 1, 1, 13, 30, 29)]) + self.assertEqual(value, '2007-01-01 13:30:29') + + def test_adapt_timedelta(self): + from datetime import timedelta + value = self.execute('select extract(epoch from (%s)::interval)', + [timedelta(days=42, seconds=45296, + microseconds=123456)]) + seconds = math.floor(value) + self.assertEqual(seconds, 3674096) + self.assertEqual(int(round((value - seconds) * 1000000)), 123456) + + def test_adapt_negative_timedelta(self): + from datetime import timedelta + value = self.execute('select extract(epoch from (%s)::interval)', + [timedelta(days=-42, seconds=45296, + microseconds=123456)]) + seconds = math.floor(value) + self.assertEqual(seconds, -3583504) + self.assertEqual(int(round((value - seconds) * 1000000)), 123456) + + def _test_type_roundtrip(self, o1): + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1), type(o2)) + return o2 + + def _test_type_roundtrip_array(self, o1): + o1 = [o1] + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1[0]), type(o2[0])) + + def test_type_roundtrip_date(self): + from datetime import date + self._test_type_roundtrip(date(2010,5,3)) + + def test_type_roundtrip_datetime(self): + from datetime import datetime + dt = self._test_type_roundtrip(datetime(2010,5,3,10,20,30)) + self.assertEqual(None, 
dt.tzinfo) + + def test_type_roundtrip_datetimetz(self): + from datetime import datetime + import psycopg2.tz + tz = psycopg2.tz.FixedOffsetTimezone(8*60) + dt1 = datetime(2010,5,3,10,20,30, tzinfo=tz) + dt2 = self._test_type_roundtrip(dt1) + self.assertNotEqual(None, dt2.tzinfo) + self.assertEqual(dt1, dt2) + + def test_type_roundtrip_time(self): + from datetime import time + self._test_type_roundtrip(time(10,20,30)) + + def test_type_roundtrip_interval(self): + from datetime import timedelta + self._test_type_roundtrip(timedelta(seconds=30)) + + def test_type_roundtrip_date_array(self): + from datetime import date + self._test_type_roundtrip_array(date(2010,5,3)) + + def test_type_roundtrip_datetime_array(self): + from datetime import datetime + self._test_type_roundtrip_array(datetime(2010,5,3,10,20,30)) + + def test_type_roundtrip_time_array(self): + from datetime import time + self._test_type_roundtrip_array(time(10,20,30)) + + def test_type_roundtrip_interval_array(self): + from datetime import timedelta + self._test_type_roundtrip_array(timedelta(seconds=30)) + + +# Only run the datetime tests if psycopg was compiled with support. 
+if not hasattr(psycopg2.extensions, 'PYDATETIME'): + del DatetimeTests + + +class mxDateTimeTests(ConnectingTestCase, CommonDatetimeTestsMixin): + """Tests for the mx.DateTime based date handling in psycopg2.""" + + def setUp(self): + ConnectingTestCase.setUp(self) + self.curs = self.conn.cursor() + self.DATE = psycopg2._psycopg.MXDATE + self.TIME = psycopg2._psycopg.MXTIME + self.DATETIME = psycopg2._psycopg.MXDATETIME + self.INTERVAL = psycopg2._psycopg.MXINTERVAL + + psycopg2.extensions.register_type(self.DATE, self.conn) + psycopg2.extensions.register_type(self.TIME, self.conn) + psycopg2.extensions.register_type(self.DATETIME, self.conn) + psycopg2.extensions.register_type(self.INTERVAL, self.conn) + psycopg2.extensions.register_type(psycopg2.extensions.MXDATEARRAY, self.conn) + psycopg2.extensions.register_type(psycopg2.extensions.MXTIMEARRAY, self.conn) + psycopg2.extensions.register_type(psycopg2.extensions.MXDATETIMEARRAY, self.conn) + psycopg2.extensions.register_type(psycopg2.extensions.MXINTERVALARRAY, self.conn) + + def tearDown(self): + self.conn.close() + + def test_parse_bc_date(self): + value = self.DATE('00042-01-01 BC', self.curs) + self.assert_(value is not None) + # mx.DateTime numbers BC dates from 0 rather than 1. + self.assertEqual(value.year, -41) + self.assertEqual(value.month, 1) + self.assertEqual(value.day, 1) + + def test_parse_bc_datetime(self): + value = self.DATETIME('00042-01-01 13:30:29 BC', self.curs) + self.assert_(value is not None) + # mx.DateTime numbers BC dates from 0 rather than 1. 
+ self.assertEqual(value.year, -41) + self.assertEqual(value.month, 1) + self.assertEqual(value.day, 1) + self.assertEqual(value.hour, 13) + self.assertEqual(value.minute, 30) + self.assertEqual(value.second, 29) + + def test_parse_time_microseconds(self): + value = self.TIME('13:30:29.123456', self.curs) + self.assertEqual(math.floor(value.second), 29) + self.assertEqual( + int((value.second - math.floor(value.second)) * 1000000), 123456) + + def test_parse_datetime_microseconds(self): + value = self.DATETIME('2007-01-01 13:30:29.123456', self.curs) + self.assertEqual(math.floor(value.second), 29) + self.assertEqual( + int((value.second - math.floor(value.second)) * 1000000), 123456) + + def test_parse_time_timezone(self): + # Time zone information is ignored. + from mx.DateTime import Time + expected = Time(13, 30, 29) + self.assertEqual(expected, self.TIME("13:30:29+01", self.curs)) + self.assertEqual(expected, self.TIME("13:30:29-01", self.curs)) + self.assertEqual(expected, self.TIME("13:30:29+01:15", self.curs)) + self.assertEqual(expected, self.TIME("13:30:29-01:15", self.curs)) + self.assertEqual(expected, self.TIME("13:30:29+01:15:42", self.curs)) + self.assertEqual(expected, self.TIME("13:30:29-01:15:42", self.curs)) + + def test_parse_datetime_timezone(self): + # Time zone information is ignored. 
+ from mx.DateTime import DateTime + expected = DateTime(2007, 1, 1, 13, 30, 29) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29+01", self.curs)) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29-01", self.curs)) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29+01:15", self.curs)) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29-01:15", self.curs)) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29+01:15:42", self.curs)) + self.assertEqual( + expected, self.DATETIME("2007-01-01 13:30:29-01:15:42", self.curs)) + + def test_parse_interval(self): + value = self.INTERVAL('42 days 05:50:05', self.curs) + self.assert_(value is not None) + self.assertEqual(value.day, 42) + self.assertEqual(value.hour, 5) + self.assertEqual(value.minute, 50) + self.assertEqual(value.second, 5) + + def test_adapt_time(self): + from mx.DateTime import Time + value = self.execute('select (%s)::time::text', + [Time(13, 30, 29)]) + self.assertEqual(value, '13:30:29') + + def test_adapt_datetime(self): + from mx.DateTime import DateTime + value = self.execute('select (%s)::timestamp::text', + [DateTime(2007, 1, 1, 13, 30, 29.123456)]) + self.assertEqual(value, '2007-01-01 13:30:29.123456') + + def test_adapt_bc_datetime(self): + from mx.DateTime import DateTime + value = self.execute('select (%s)::timestamp::text', + [DateTime(-41, 1, 1, 13, 30, 29.123456)]) + # microsecs for BC timestamps look not available in PG < 8.4 + # but more likely it's determined at compile time. 
+ self.assert_(value in ( + '0042-01-01 13:30:29.123456 BC', + '0042-01-01 13:30:29 BC'), value) + + def test_adapt_timedelta(self): + from mx.DateTime import DateTimeDeltaFrom + value = self.execute('select extract(epoch from (%s)::interval)', + [DateTimeDeltaFrom(days=42, + seconds=45296.123456)]) + seconds = math.floor(value) + self.assertEqual(seconds, 3674096) + self.assertEqual(int(round((value - seconds) * 1000000)), 123456) + + def test_adapt_negative_timedelta(self): + from mx.DateTime import DateTimeDeltaFrom + value = self.execute('select extract(epoch from (%s)::interval)', + [DateTimeDeltaFrom(days=-42, + seconds=45296.123456)]) + seconds = math.floor(value) + self.assertEqual(seconds, -3583504) + self.assertEqual(int(round((value - seconds) * 1000000)), 123456) + + def _test_type_roundtrip(self, o1): + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1), type(o2)) + + def _test_type_roundtrip_array(self, o1): + o1 = [o1] + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1[0]), type(o2[0])) + + def test_type_roundtrip_date(self): + from mx.DateTime import Date + self._test_type_roundtrip(Date(2010,5,3)) + + def test_type_roundtrip_datetime(self): + from mx.DateTime import DateTime + self._test_type_roundtrip(DateTime(2010,5,3,10,20,30)) + + def test_type_roundtrip_time(self): + from mx.DateTime import Time + self._test_type_roundtrip(Time(10,20,30)) + + def test_type_roundtrip_interval(self): + from mx.DateTime import DateTimeDeltaFrom + self._test_type_roundtrip(DateTimeDeltaFrom(seconds=30)) + + def test_type_roundtrip_date_array(self): + from mx.DateTime import Date + self._test_type_roundtrip_array(Date(2010,5,3)) + + def test_type_roundtrip_datetime_array(self): + from mx.DateTime import DateTime + self._test_type_roundtrip_array(DateTime(2010,5,3,10,20,30)) + + def test_type_roundtrip_time_array(self): + from mx.DateTime import Time + self._test_type_roundtrip_array(Time(10,20,30)) + + def 
test_type_roundtrip_interval_array(self): + from mx.DateTime import DateTimeDeltaFrom + self._test_type_roundtrip_array(DateTimeDeltaFrom(seconds=30)) + + +# Only run the mx.DateTime tests if psycopg was compiled with support. +try: + if not hasattr(psycopg2._psycopg, 'MXDATETIME'): + del mxDateTimeTests +except AttributeError: + del mxDateTimeTests + + +class FromTicksTestCase(unittest.TestCase): + # bug "TimestampFromTicks() throws ValueError (2-2.0.14)" + # reported by Jozsef Szalay on 2010-05-06 + def test_timestamp_value_error_sec_59_99(self): + from datetime import datetime + s = psycopg2.TimestampFromTicks(1273173119.99992) + self.assertEqual(s.adapted, + datetime(2010, 5, 6, 14, 11, 59, 999920, + tzinfo=FixedOffsetTimezone(-5 * 60))) + + def test_date_value_error_sec_59_99(self): + from datetime import date + s = psycopg2.DateFromTicks(1273173119.99992) + self.assertEqual(s.adapted, date(2010, 5, 6)) + + def test_time_value_error_sec_59_99(self): + from datetime import time + s = psycopg2.TimeFromTicks(1273173119.99992) + self.assertEqual(s.adapted.replace(hour=0), + time(0, 11, 59, 999920)) + + +class FixedOffsetTimezoneTests(unittest.TestCase): + + def test_init_with_no_args(self): + tzinfo = FixedOffsetTimezone() + self.assert_(tzinfo._offset is ZERO) + self.assert_(tzinfo._name is None) + + def test_repr_with_positive_offset(self): + tzinfo = FixedOffsetTimezone(5 * 60) + self.assertEqual(repr(tzinfo), "psycopg2.tz.FixedOffsetTimezone(offset=300, name=None)") + + def test_repr_with_negative_offset(self): + tzinfo = FixedOffsetTimezone(-5 * 60) + self.assertEqual(repr(tzinfo), "psycopg2.tz.FixedOffsetTimezone(offset=-300, name=None)") + + def test_repr_with_name(self): + tzinfo = FixedOffsetTimezone(name="FOO") + self.assertEqual(repr(tzinfo), "psycopg2.tz.FixedOffsetTimezone(offset=0, name='FOO')") + + def test_instance_caching(self): + self.assert_(FixedOffsetTimezone(name="FOO") is FixedOffsetTimezone(name="FOO")) + self.assert_(FixedOffsetTimezone(7 
* 60) is FixedOffsetTimezone(7 * 60)) + self.assert_(FixedOffsetTimezone(-9 * 60, 'FOO') is FixedOffsetTimezone(-9 * 60, 'FOO')) + self.assert_(FixedOffsetTimezone(9 * 60) is not FixedOffsetTimezone(9 * 60, 'FOO')) + self.assert_(FixedOffsetTimezone(name='FOO') is not FixedOffsetTimezone(9 * 60, 'FOO')) + + def test_pickle(self): + # ticket #135 + import pickle + + tz11 = FixedOffsetTimezone(60) + tz12 = FixedOffsetTimezone(120) + for proto in [-1, 0, 1, 2]: + tz21, tz22 = pickle.loads(pickle.dumps([tz11, tz12], proto)) + self.assertEqual(tz11, tz21) + self.assertEqual(tz12, tz22) + + tz11 = FixedOffsetTimezone(60, name='foo') + tz12 = FixedOffsetTimezone(120, name='bar') + for proto in [-1, 0, 1, 2]: + tz21, tz22 = pickle.loads(pickle.dumps([tz11, tz12], proto)) + self.assertEqual(tz11, tz21) + self.assertEqual(tz12, tz22) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_extras_dictcursor.py b/psycopg2/tests/test_extras_dictcursor.py new file mode 100644 index 0000000..f2fefff --- /dev/null +++ b/psycopg2/tests/test_extras_dictcursor.py @@ -0,0 +1,483 @@ +#!/usr/bin/env python +# +# extras_dictcursor - test if DictCursor extension class works +# +# Copyright (C) 2004-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import time +from datetime import timedelta +import psycopg2 +import psycopg2.extras +from testutils import unittest, ConnectingTestCase, skip_before_postgres +from testutils import skip_if_no_namedtuple + + +class ExtrasDictCursorTests(ConnectingTestCase): + """Test if DictCursor extension class works.""" + + def setUp(self): + ConnectingTestCase.setUp(self) + curs = self.conn.cursor() + curs.execute("CREATE TEMPORARY TABLE ExtrasDictCursorTests (foo text)") + curs.execute("INSERT INTO ExtrasDictCursorTests VALUES ('bar')") + self.conn.commit() + + def testDictConnCursorArgs(self): + self.conn.close() + self.conn = self.connect(connection_factory=psycopg2.extras.DictConnection) + cur = self.conn.cursor() + self.assert_(isinstance(cur, psycopg2.extras.DictCursor)) + self.assertEqual(cur.name, None) + # overridable + cur = self.conn.cursor('foo', cursor_factory=psycopg2.extras.NamedTupleCursor) + self.assertEqual(cur.name, 'foo') + self.assert_(isinstance(cur, psycopg2.extras.NamedTupleCursor)) + + def testDictCursorWithPlainCursorFetchOne(self): + self._testWithPlainCursor(lambda curs: curs.fetchone()) + + def testDictCursorWithPlainCursorFetchMany(self): + self._testWithPlainCursor(lambda curs: curs.fetchmany(100)[0]) + + def testDictCursorWithPlainCursorFetchManyNoarg(self): + self._testWithPlainCursor(lambda curs: curs.fetchmany()[0]) + + def testDictCursorWithPlainCursorFetchAll(self): + self._testWithPlainCursor(lambda curs: curs.fetchall()[0]) + + def testDictCursorWithPlainCursorIter(self): + def getter(curs): + for row in curs: + return row + self._testWithPlainCursor(getter) + + def testUpdateRow(self): + row = self._testWithPlainCursor(lambda curs: curs.fetchone()) + row['foo'] = 'qux' + self.failUnless(row['foo'] == 'qux') + self.failUnless(row[0] == 'qux') + + @skip_before_postgres(8, 0) + def testDictCursorWithPlainCursorIterRowNumber(self): + curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + self._testIterRowNumber(curs) + + def 
_testWithPlainCursor(self, getter): + curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + curs.execute("SELECT * FROM ExtrasDictCursorTests") + row = getter(curs) + self.failUnless(row['foo'] == 'bar') + self.failUnless(row[0] == 'bar') + return row + + + def testDictCursorWithPlainCursorRealFetchOne(self): + self._testWithPlainCursorReal(lambda curs: curs.fetchone()) + + def testDictCursorWithPlainCursorRealFetchMany(self): + self._testWithPlainCursorReal(lambda curs: curs.fetchmany(100)[0]) + + def testDictCursorWithPlainCursorRealFetchManyNoarg(self): + self._testWithPlainCursorReal(lambda curs: curs.fetchmany()[0]) + + def testDictCursorWithPlainCursorRealFetchAll(self): + self._testWithPlainCursorReal(lambda curs: curs.fetchall()[0]) + + def testDictCursorWithPlainCursorRealIter(self): + def getter(curs): + for row in curs: + return row + self._testWithPlainCursorReal(getter) + + @skip_before_postgres(8, 0) + def testDictCursorWithPlainCursorRealIterRowNumber(self): + curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + self._testIterRowNumber(curs) + + def _testWithPlainCursorReal(self, getter): + curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + curs.execute("SELECT * FROM ExtrasDictCursorTests") + row = getter(curs) + self.failUnless(row['foo'] == 'bar') + + + def testDictCursorWithNamedCursorFetchOne(self): + self._testWithNamedCursor(lambda curs: curs.fetchone()) + + def testDictCursorWithNamedCursorFetchMany(self): + self._testWithNamedCursor(lambda curs: curs.fetchmany(100)[0]) + + def testDictCursorWithNamedCursorFetchManyNoarg(self): + self._testWithNamedCursor(lambda curs: curs.fetchmany()[0]) + + def testDictCursorWithNamedCursorFetchAll(self): + self._testWithNamedCursor(lambda curs: curs.fetchall()[0]) + + def testDictCursorWithNamedCursorIter(self): + def getter(curs): + for row in curs: + return row + self._testWithNamedCursor(getter) + + @skip_before_postgres(8, 2) + def 
testDictCursorWithNamedCursorNotGreedy(self): + curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.DictCursor) + self._testNamedCursorNotGreedy(curs) + + @skip_before_postgres(8, 0) + def testDictCursorWithNamedCursorIterRowNumber(self): + curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.DictCursor) + self._testIterRowNumber(curs) + + def _testWithNamedCursor(self, getter): + curs = self.conn.cursor('aname', cursor_factory=psycopg2.extras.DictCursor) + curs.execute("SELECT * FROM ExtrasDictCursorTests") + row = getter(curs) + self.failUnless(row['foo'] == 'bar') + self.failUnless(row[0] == 'bar') + + + def testDictCursorRealWithNamedCursorFetchOne(self): + self._testWithNamedCursorReal(lambda curs: curs.fetchone()) + + def testDictCursorRealWithNamedCursorFetchMany(self): + self._testWithNamedCursorReal(lambda curs: curs.fetchmany(100)[0]) + + def testDictCursorRealWithNamedCursorFetchManyNoarg(self): + self._testWithNamedCursorReal(lambda curs: curs.fetchmany()[0]) + + def testDictCursorRealWithNamedCursorFetchAll(self): + self._testWithNamedCursorReal(lambda curs: curs.fetchall()[0]) + + def testDictCursorRealWithNamedCursorIter(self): + def getter(curs): + for row in curs: + return row + self._testWithNamedCursorReal(getter) + + @skip_before_postgres(8, 2) + def testDictCursorRealWithNamedCursorNotGreedy(self): + curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.RealDictCursor) + self._testNamedCursorNotGreedy(curs) + + @skip_before_postgres(8, 0) + def testDictCursorRealWithNamedCursorIterRowNumber(self): + curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.RealDictCursor) + self._testIterRowNumber(curs) + + def _testWithNamedCursorReal(self, getter): + curs = self.conn.cursor('aname', cursor_factory=psycopg2.extras.RealDictCursor) + curs.execute("SELECT * FROM ExtrasDictCursorTests") + row = getter(curs) + self.failUnless(row['foo'] == 'bar') + + + def _testNamedCursorNotGreedy(self, curs): + curs.itersize = 2 + 
curs.execute("""select clock_timestamp() as ts from generate_series(1,3)""") + recs = [] + for t in curs: + time.sleep(0.01) + recs.append(t) + + # check that the dataset was not fetched in a single gulp + self.assert_(recs[1]['ts'] - recs[0]['ts'] < timedelta(seconds=0.005)) + self.assert_(recs[2]['ts'] - recs[1]['ts'] > timedelta(seconds=0.0099)) + + def _testIterRowNumber(self, curs): + # Only checking for dataset < itersize: + # see CursorTests.test_iter_named_cursor_rownumber + curs.itersize = 20 + curs.execute("""select * from generate_series(1,10)""") + for i, r in enumerate(curs): + self.assertEqual(i + 1, curs.rownumber) + + def testPickleDictRow(self): + import pickle + curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + curs.execute("select 10 as a, 20 as b") + r = curs.fetchone() + d = pickle.dumps(r) + r1 = pickle.loads(d) + self.assertEqual(r, r1) + self.assertEqual(r[0], r1[0]) + self.assertEqual(r[1], r1[1]) + self.assertEqual(r['a'], r1['a']) + self.assertEqual(r['b'], r1['b']) + self.assertEqual(r._index, r1._index) + + def testPickleRealDictRow(self): + import pickle + curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + curs.execute("select 10 as a, 20 as b") + r = curs.fetchone() + d = pickle.dumps(r) + r1 = pickle.loads(d) + self.assertEqual(r, r1) + self.assertEqual(r['a'], r1['a']) + self.assertEqual(r['b'], r1['b']) + self.assertEqual(r._column_mapping, r1._column_mapping) + + +class NamedTupleCursorTest(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + from psycopg2.extras import NamedTupleConnection + + try: + from collections import namedtuple + except ImportError: + return + + self.conn = self.connect(connection_factory=NamedTupleConnection) + curs = self.conn.cursor() + curs.execute("CREATE TEMPORARY TABLE nttest (i int, s text)") + curs.execute("INSERT INTO nttest VALUES (1, 'foo')") + curs.execute("INSERT INTO nttest VALUES (2, 'bar')") + curs.execute("INSERT INTO nttest 
VALUES (3, 'baz')") + self.conn.commit() + + @skip_if_no_namedtuple + def test_cursor_args(self): + cur = self.conn.cursor('foo', cursor_factory=psycopg2.extras.DictCursor) + self.assertEqual(cur.name, 'foo') + self.assert_(isinstance(cur, psycopg2.extras.DictCursor)) + + @skip_if_no_namedtuple + def test_fetchone(self): + curs = self.conn.cursor() + curs.execute("select * from nttest order by 1") + t = curs.fetchone() + self.assertEqual(t[0], 1) + self.assertEqual(t.i, 1) + self.assertEqual(t[1], 'foo') + self.assertEqual(t.s, 'foo') + self.assertEqual(curs.rownumber, 1) + self.assertEqual(curs.rowcount, 3) + + @skip_if_no_namedtuple + def test_fetchmany_noarg(self): + curs = self.conn.cursor() + curs.arraysize = 2 + curs.execute("select * from nttest order by 1") + res = curs.fetchmany() + self.assertEqual(2, len(res)) + self.assertEqual(res[0].i, 1) + self.assertEqual(res[0].s, 'foo') + self.assertEqual(res[1].i, 2) + self.assertEqual(res[1].s, 'bar') + self.assertEqual(curs.rownumber, 2) + self.assertEqual(curs.rowcount, 3) + + @skip_if_no_namedtuple + def test_fetchmany(self): + curs = self.conn.cursor() + curs.execute("select * from nttest order by 1") + res = curs.fetchmany(2) + self.assertEqual(2, len(res)) + self.assertEqual(res[0].i, 1) + self.assertEqual(res[0].s, 'foo') + self.assertEqual(res[1].i, 2) + self.assertEqual(res[1].s, 'bar') + self.assertEqual(curs.rownumber, 2) + self.assertEqual(curs.rowcount, 3) + + @skip_if_no_namedtuple + def test_fetchall(self): + curs = self.conn.cursor() + curs.execute("select * from nttest order by 1") + res = curs.fetchall() + self.assertEqual(3, len(res)) + self.assertEqual(res[0].i, 1) + self.assertEqual(res[0].s, 'foo') + self.assertEqual(res[1].i, 2) + self.assertEqual(res[1].s, 'bar') + self.assertEqual(res[2].i, 3) + self.assertEqual(res[2].s, 'baz') + self.assertEqual(curs.rownumber, 3) + self.assertEqual(curs.rowcount, 3) + + @skip_if_no_namedtuple + def test_executemany(self): + curs = self.conn.cursor() + 
curs.executemany("delete from nttest where i = %s", + [(1,), (2,)]) + curs.execute("select * from nttest order by 1") + res = curs.fetchall() + self.assertEqual(1, len(res)) + self.assertEqual(res[0].i, 3) + self.assertEqual(res[0].s, 'baz') + + @skip_if_no_namedtuple + def test_iter(self): + curs = self.conn.cursor() + curs.execute("select * from nttest order by 1") + i = iter(curs) + self.assertEqual(curs.rownumber, 0) + + t = i.next() + self.assertEqual(t.i, 1) + self.assertEqual(t.s, 'foo') + self.assertEqual(curs.rownumber, 1) + self.assertEqual(curs.rowcount, 3) + + t = i.next() + self.assertEqual(t.i, 2) + self.assertEqual(t.s, 'bar') + self.assertEqual(curs.rownumber, 2) + self.assertEqual(curs.rowcount, 3) + + t = i.next() + self.assertEqual(t.i, 3) + self.assertEqual(t.s, 'baz') + self.assertRaises(StopIteration, i.next) + self.assertEqual(curs.rownumber, 3) + self.assertEqual(curs.rowcount, 3) + + def test_error_message(self): + try: + from collections import namedtuple + except ImportError: + # an import error somewhere + from psycopg2.extras import NamedTupleConnection + try: + self.conn = self.connect( + connection_factory=NamedTupleConnection) + curs = self.conn.cursor() + curs.execute("select 1") + curs.fetchone() + except ImportError: + pass + else: + self.fail("expecting ImportError") + else: + return self.skipTest("namedtuple available") + + @skip_if_no_namedtuple + def test_record_updated(self): + curs = self.conn.cursor() + curs.execute("select 1 as foo;") + r = curs.fetchone() + self.assertEqual(r.foo, 1) + + curs.execute("select 2 as bar;") + r = curs.fetchone() + self.assertEqual(r.bar, 2) + self.assertRaises(AttributeError, getattr, r, 'foo') + + @skip_if_no_namedtuple + def test_no_result_no_surprise(self): + curs = self.conn.cursor() + curs.execute("update nttest set s = s") + self.assertRaises(psycopg2.ProgrammingError, curs.fetchone) + + curs.execute("update nttest set s = s") + self.assertRaises(psycopg2.ProgrammingError, 
curs.fetchall) + + @skip_if_no_namedtuple + def test_minimal_generation(self): + # Instrument the class to verify it gets called the minimum number of times. + from psycopg2.extras import NamedTupleCursor + f_orig = NamedTupleCursor._make_nt + calls = [0] + def f_patched(self_): + calls[0] += 1 + return f_orig(self_) + + NamedTupleCursor._make_nt = f_patched + + try: + curs = self.conn.cursor() + curs.execute("select * from nttest order by 1") + curs.fetchone() + curs.fetchone() + curs.fetchone() + self.assertEqual(1, calls[0]) + + curs.execute("select * from nttest order by 1") + curs.fetchone() + curs.fetchall() + self.assertEqual(2, calls[0]) + + curs.execute("select * from nttest order by 1") + curs.fetchone() + curs.fetchmany(1) + self.assertEqual(3, calls[0]) + + finally: + NamedTupleCursor._make_nt = f_orig + + @skip_if_no_namedtuple + @skip_before_postgres(8, 0) + def test_named(self): + curs = self.conn.cursor('tmp') + curs.execute("""select i from generate_series(0,9) i""") + recs = [] + recs.extend(curs.fetchmany(5)) + recs.append(curs.fetchone()) + recs.extend(curs.fetchall()) + self.assertEqual(range(10), [t.i for t in recs]) + + @skip_if_no_namedtuple + def test_named_fetchone(self): + curs = self.conn.cursor('tmp') + curs.execute("""select 42 as i""") + t = curs.fetchone() + self.assertEqual(t.i, 42) + + @skip_if_no_namedtuple + def test_named_fetchmany(self): + curs = self.conn.cursor('tmp') + curs.execute("""select 42 as i""") + recs = curs.fetchmany(10) + self.assertEqual(recs[0].i, 42) + + @skip_if_no_namedtuple + def test_named_fetchall(self): + curs = self.conn.cursor('tmp') + curs.execute("""select 42 as i""") + recs = curs.fetchall() + self.assertEqual(recs[0].i, 42) + + @skip_if_no_namedtuple + @skip_before_postgres(8, 2) + def test_not_greedy(self): + curs = self.conn.cursor('tmp') + curs.itersize = 2 + curs.execute("""select clock_timestamp() as ts from generate_series(1,3)""") + recs = [] + for t in curs: + time.sleep(0.01) + 
recs.append(t) + + # check that the dataset was not fetched in a single gulp + self.assert_(recs[1].ts - recs[0].ts < timedelta(seconds=0.005)) + self.assert_(recs[2].ts - recs[1].ts > timedelta(seconds=0.0099)) + + @skip_if_no_namedtuple + @skip_before_postgres(8, 0) + def test_named_rownumber(self): + curs = self.conn.cursor('tmp') + # Only checking for dataset < itersize: + # see CursorTests.test_iter_named_cursor_rownumber + curs.itersize = 4 + curs.execute("""select * from generate_series(1,3)""") + for i, t in enumerate(curs): + self.assertEqual(i + 1, curs.rownumber) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_green.py b/psycopg2/tests/test_green.py new file mode 100644 index 0000000..506b38f --- /dev/null +++ b/psycopg2/tests/test_green.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python + +# test_green.py - unit test for async wait callback +# +# Copyright (C) 2010-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import unittest +import psycopg2 +import psycopg2.extensions +import psycopg2.extras + +from testutils import ConnectingTestCase + +class ConnectionStub(object): + """A `connection` wrapper allowing analysis of the `poll()` calls.""" + def __init__(self, conn): + self.conn = conn + self.polls = [] + + def fileno(self): + return self.conn.fileno() + + def poll(self): + rv = self.conn.poll() + self.polls.append(rv) + return rv + +class GreenTestCase(ConnectingTestCase): + def setUp(self): + self._cb = psycopg2.extensions.get_wait_callback() + psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select) + ConnectingTestCase.setUp(self) + + def tearDown(self): + ConnectingTestCase.tearDown(self) + psycopg2.extensions.set_wait_callback(self._cb) + + def set_stub_wait_callback(self, conn): + stub = ConnectionStub(conn) + psycopg2.extensions.set_wait_callback( + lambda conn: psycopg2.extras.wait_select(stub)) + return stub + + def test_flush_on_write(self): + # a very large query requires a flush loop to be sent to the backend + conn = self.conn + stub = self.set_stub_wait_callback(conn) + curs = conn.cursor() + for mb in 1, 5, 10, 20, 50: + size = mb * 1024 * 1024 + del stub.polls[:] + curs.execute("select %s;", ('x' * size,)) + self.assertEqual(size, len(curs.fetchone()[0])) + if stub.polls.count(psycopg2.extensions.POLL_WRITE) > 1: + return + + # This is more a testing glitch than an error: it happens + # on high load on linux: probably because the kernel has more + # buffers ready. A warning may be useful during development, + # but an error is bad during regression testing. + import warnings + warnings.warn("sending a large query didn't trigger block on write.") + + def test_error_in_callback(self): + # behaviour changed after issue #113: if there is an error in the + # callback for the moment we don't have a way to reset the connection + # without blocking (ticket #113) so just close it. 
+ conn = self.conn + curs = conn.cursor() + curs.execute("select 1") # have a BEGIN + curs.fetchone() + + # now try to do something that will fail in the callback + psycopg2.extensions.set_wait_callback(lambda conn: 1//0) + self.assertRaises(ZeroDivisionError, curs.execute, "select 2") + + self.assert_(conn.closed) + + def test_dont_freak_out(self): + # if there is an error in a green query, don't freak out and close + # the connection + conn = self.conn + curs = conn.cursor() + self.assertRaises(psycopg2.ProgrammingError, + curs.execute, "select the unselectable") + + # check that the connection is left in an usable state + self.assert_(not conn.closed) + conn.rollback() + curs.execute("select 1") + self.assertEqual(curs.fetchone()[0], 1) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_lobject.py b/psycopg2/tests/test_lobject.py new file mode 100644 index 0000000..e62e0d8 --- /dev/null +++ b/psycopg2/tests/test_lobject.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python + +# test_lobject.py - unit test for large objects support +# +# Copyright (C) 2008-2011 James Henstridge +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. 
+# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import os +import shutil +import tempfile +from functools import wraps + +import psycopg2 +import psycopg2.extensions +from psycopg2.extensions import b +from testutils import unittest, decorate_all_tests, skip_if_tpc_disabled +from testutils import ConnectingTestCase, skip_if_green + +def skip_if_no_lo(f): + @wraps(f) + def skip_if_no_lo_(self): + if self.conn.server_version < 80100: + return self.skipTest("large objects only supported from PG 8.1") + else: + return f(self) + + return skip_if_no_lo_ + +skip_lo_if_green = skip_if_green("libpq doesn't support LO in async mode") + + +class LargeObjectTestCase(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + self.lo_oid = None + self.tmpdir = None + + def tearDown(self): + if self.tmpdir: + shutil.rmtree(self.tmpdir, ignore_errors=True) + + if self.conn.closed: + return + + if self.lo_oid is not None: + self.conn.rollback() + try: + lo = self.conn.lobject(self.lo_oid, "n") + except psycopg2.OperationalError: + pass + else: + lo.unlink() + + ConnectingTestCase.tearDown(self) + + +class LargeObjectTests(LargeObjectTestCase): + def test_create(self): + lo = self.conn.lobject() + self.assertNotEqual(lo, None) + self.assertEqual(lo.mode[0], "w") + + def test_connection_needed(self): + self.assertRaises(TypeError, + psycopg2.extensions.lobject, []) + + def test_open_non_existent(self): + # By creating then removing a large object, we get an Oid that + # should be unused. 
+ lo = self.conn.lobject() + lo.unlink() + self.assertRaises(psycopg2.OperationalError, self.conn.lobject, lo.oid) + + def test_open_existing(self): + lo = self.conn.lobject() + lo2 = self.conn.lobject(lo.oid) + self.assertNotEqual(lo2, None) + self.assertEqual(lo2.oid, lo.oid) + self.assertEqual(lo2.mode[0], "r") + + def test_open_for_write(self): + lo = self.conn.lobject() + lo2 = self.conn.lobject(lo.oid, "w") + self.assertEqual(lo2.mode[0], "w") + lo2.write(b("some data")) + + def test_open_mode_n(self): + # Openning an object in mode "n" gives us a closed lobject. + lo = self.conn.lobject() + lo.close() + + lo2 = self.conn.lobject(lo.oid, "n") + self.assertEqual(lo2.oid, lo.oid) + self.assertEqual(lo2.closed, True) + + def test_close_connection_gone(self): + lo = self.conn.lobject() + self.conn.close() + lo.close() + + def test_create_with_oid(self): + # Create and delete a large object to get an unused Oid. + lo = self.conn.lobject() + oid = lo.oid + lo.unlink() + + lo = self.conn.lobject(0, "w", oid) + self.assertEqual(lo.oid, oid) + + def test_create_with_existing_oid(self): + lo = self.conn.lobject() + lo.close() + + self.assertRaises(psycopg2.OperationalError, + self.conn.lobject, 0, "w", lo.oid) + + def test_import(self): + self.tmpdir = tempfile.mkdtemp() + filename = os.path.join(self.tmpdir, "data.txt") + fp = open(filename, "wb") + fp.write(b("some data")) + fp.close() + + lo = self.conn.lobject(0, "r", 0, filename) + self.assertEqual(lo.read(), "some data") + + def test_close(self): + lo = self.conn.lobject() + self.assertEqual(lo.closed, False) + lo.close() + self.assertEqual(lo.closed, True) + + def test_write(self): + lo = self.conn.lobject() + self.assertEqual(lo.write(b("some data")), len("some data")) + + def test_write_large(self): + lo = self.conn.lobject() + data = "data" * 1000000 + self.assertEqual(lo.write(data), len(data)) + + def test_read(self): + lo = self.conn.lobject() + length = lo.write(b("some data")) + lo.close() + + lo = 
self.conn.lobject(lo.oid) + x = lo.read(4) + self.assertEqual(type(x), type('')) + self.assertEqual(x, "some") + self.assertEqual(lo.read(), " data") + + def test_read_binary(self): + lo = self.conn.lobject() + length = lo.write(b("some data")) + lo.close() + + lo = self.conn.lobject(lo.oid, "rb") + x = lo.read(4) + self.assertEqual(type(x), type(b(''))) + self.assertEqual(x, b("some")) + self.assertEqual(lo.read(), b(" data")) + + def test_read_text(self): + lo = self.conn.lobject() + snowman = u"\u2603" + length = lo.write(u"some data " + snowman) + lo.close() + + lo = self.conn.lobject(lo.oid, "rt") + x = lo.read(4) + self.assertEqual(type(x), type(u'')) + self.assertEqual(x, u"some") + self.assertEqual(lo.read(), u" data " + snowman) + + def test_read_large(self): + lo = self.conn.lobject() + data = "data" * 1000000 + length = lo.write("some" + data) + lo.close() + + lo = self.conn.lobject(lo.oid) + self.assertEqual(lo.read(4), "some") + data1 = lo.read() + # avoid dumping megacraps in the console in case of error + self.assert_(data == data1, + "%r... != %r..." % (data[:100], data1[:100])) + + def test_seek_tell(self): + lo = self.conn.lobject() + length = lo.write(b("some data")) + self.assertEqual(lo.tell(), length) + lo.close() + lo = self.conn.lobject(lo.oid) + + self.assertEqual(lo.seek(5, 0), 5) + self.assertEqual(lo.tell(), 5) + self.assertEqual(lo.read(), "data") + + # SEEK_CUR: relative current location + lo.seek(5) + self.assertEqual(lo.seek(2, 1), 7) + self.assertEqual(lo.tell(), 7) + self.assertEqual(lo.read(), "ta") + + # SEEK_END: relative to end of file + self.assertEqual(lo.seek(-2, 2), length - 2) + self.assertEqual(lo.read(), "ta") + + def test_unlink(self): + lo = self.conn.lobject() + lo.unlink() + + # the object doesn't exist now, so we can't reopen it. + self.assertRaises(psycopg2.OperationalError, self.conn.lobject, lo.oid) + # And the object has been closed. 
+ self.assertEquals(lo.closed, True) + + def test_export(self): + lo = self.conn.lobject() + lo.write(b("some data")) + + self.tmpdir = tempfile.mkdtemp() + filename = os.path.join(self.tmpdir, "data.txt") + lo.export(filename) + self.assertTrue(os.path.exists(filename)) + f = open(filename, "rb") + try: + self.assertEqual(f.read(), b("some data")) + finally: + f.close() + + def test_close_twice(self): + lo = self.conn.lobject() + lo.close() + lo.close() + + def test_write_after_close(self): + lo = self.conn.lobject() + lo.close() + self.assertRaises(psycopg2.InterfaceError, lo.write, b("some data")) + + def test_read_after_close(self): + lo = self.conn.lobject() + lo.close() + self.assertRaises(psycopg2.InterfaceError, lo.read, 5) + + def test_seek_after_close(self): + lo = self.conn.lobject() + lo.close() + self.assertRaises(psycopg2.InterfaceError, lo.seek, 0) + + def test_tell_after_close(self): + lo = self.conn.lobject() + lo.close() + self.assertRaises(psycopg2.InterfaceError, lo.tell) + + def test_unlink_after_close(self): + lo = self.conn.lobject() + lo.close() + # Unlink works on closed files. + lo.unlink() + + def test_export_after_close(self): + lo = self.conn.lobject() + lo.write(b("some data")) + lo.close() + + self.tmpdir = tempfile.mkdtemp() + filename = os.path.join(self.tmpdir, "data.txt") + lo.export(filename) + self.assertTrue(os.path.exists(filename)) + f = open(filename, "rb") + try: + self.assertEqual(f.read(), b("some data")) + finally: + f.close() + + def test_close_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + # Closing outside of the transaction is okay. 
+ lo.close() + + def test_write_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.write, b("some data")) + + def test_read_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.read, 5) + + def test_seek_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.seek, 0) + + def test_tell_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.tell) + + def test_unlink_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + # Unlink of stale lobject is okay + lo.unlink() + + def test_export_after_commit(self): + lo = self.conn.lobject() + lo.write(b("some data")) + self.conn.commit() + + self.tmpdir = tempfile.mkdtemp() + filename = os.path.join(self.tmpdir, "data.txt") + lo.export(filename) + self.assertTrue(os.path.exists(filename)) + f = open(filename, "rb") + try: + self.assertEqual(f.read(), b("some data")) + finally: + f.close() + + @skip_if_tpc_disabled + def test_read_after_tpc_commit(self): + self.conn.tpc_begin('test_lobject') + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.tpc_commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.read, 5) + + @skip_if_tpc_disabled + def test_read_after_tpc_prepare(self): + self.conn.tpc_begin('test_lobject') + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.tpc_prepare() + + try: + self.assertRaises(psycopg2.ProgrammingError, lo.read, 5) + finally: + self.conn.tpc_commit() + + +decorate_all_tests(LargeObjectTests, skip_if_no_lo, skip_lo_if_green) + + +def skip_if_no_truncate(f): + @wraps(f) + def skip_if_no_truncate_(self): + if self.conn.server_version < 80300: + return self.skipTest( + "the server 
doesn't support large object truncate") + + if not hasattr(psycopg2.extensions.lobject, 'truncate'): + return self.skipTest( + "psycopg2 has been built against a libpq " + "without large object truncate support.") + + return f(self) + + return skip_if_no_truncate_ + +class LargeObjectTruncateTests(LargeObjectTestCase): + def test_truncate(self): + lo = self.conn.lobject() + lo.write("some data") + lo.close() + + lo = self.conn.lobject(lo.oid, "w") + lo.truncate(4) + + # seek position unchanged + self.assertEqual(lo.tell(), 0) + # data truncated + self.assertEqual(lo.read(), "some") + + lo.truncate(6) + lo.seek(0) + # large object extended with zeroes + self.assertEqual(lo.read(), "some\x00\x00") + + lo.truncate() + lo.seek(0) + # large object empty + self.assertEqual(lo.read(), "") + + def test_truncate_after_close(self): + lo = self.conn.lobject() + lo.close() + self.assertRaises(psycopg2.InterfaceError, lo.truncate) + + def test_truncate_after_commit(self): + lo = self.conn.lobject() + self.lo_oid = lo.oid + self.conn.commit() + + self.assertRaises(psycopg2.ProgrammingError, lo.truncate) + +decorate_all_tests(LargeObjectTruncateTests, + skip_if_no_lo, skip_lo_if_green, skip_if_no_truncate) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_module.py b/psycopg2/tests/test_module.py new file mode 100644 index 0000000..b2f5279 --- /dev/null +++ b/psycopg2/tests/test_module.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python + +# test_module.py - unit test for the module interface +# +# Copyright (C) 2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +from testutils import unittest, skip_before_python, skip_before_postgres +from testutils import ConnectingTestCase, skip_copy_if_green + +import psycopg2 + +class ConnectTestCase(unittest.TestCase): + def setUp(self): + self.args = None + def conect_stub(dsn, connection_factory=None, async=False): + self.args = (dsn, connection_factory, async) + + self._connect_orig = psycopg2._connect + psycopg2._connect = conect_stub + + def tearDown(self): + psycopg2._connect = self._connect_orig + + def test_there_has_to_be_something(self): + self.assertRaises(TypeError, psycopg2.connect) + self.assertRaises(TypeError, psycopg2.connect, + connection_factory=lambda dsn, async=False: None) + self.assertRaises(TypeError, psycopg2.connect, + async=True) + + def test_no_keywords(self): + psycopg2.connect('') + self.assertEqual(self.args[0], '') + self.assertEqual(self.args[1], None) + self.assertEqual(self.args[2], False) + + def test_dsn(self): + psycopg2.connect('dbname=blah x=y') + self.assertEqual(self.args[0], 'dbname=blah x=y') + self.assertEqual(self.args[1], None) + self.assertEqual(self.args[2], False) + + def test_supported_keywords(self): + psycopg2.connect(database='foo') + self.assertEqual(self.args[0], 'dbname=foo') + psycopg2.connect(user='postgres') + self.assertEqual(self.args[0], 'user=postgres') + psycopg2.connect(password='secret') 
+ self.assertEqual(self.args[0], 'password=secret') + psycopg2.connect(port=5432) + self.assertEqual(self.args[0], 'port=5432') + psycopg2.connect(sslmode='require') + self.assertEqual(self.args[0], 'sslmode=require') + + psycopg2.connect(database='foo', + user='postgres', password='secret', port=5432) + self.assert_('dbname=foo' in self.args[0]) + self.assert_('user=postgres' in self.args[0]) + self.assert_('password=secret' in self.args[0]) + self.assert_('port=5432' in self.args[0]) + self.assertEqual(len(self.args[0].split()), 4) + + def test_generic_keywords(self): + psycopg2.connect(foo='bar') + self.assertEqual(self.args[0], 'foo=bar') + + def test_factory(self): + def f(dsn, async=False): + pass + + psycopg2.connect(database='foo', bar='baz', connection_factory=f) + self.assertEqual(self.args[0], 'dbname=foo bar=baz') + self.assertEqual(self.args[1], f) + self.assertEqual(self.args[2], False) + + psycopg2.connect("dbname=foo bar=baz", connection_factory=f) + self.assertEqual(self.args[0], 'dbname=foo bar=baz') + self.assertEqual(self.args[1], f) + self.assertEqual(self.args[2], False) + + def test_async(self): + psycopg2.connect(database='foo', bar='baz', async=1) + self.assertEqual(self.args[0], 'dbname=foo bar=baz') + self.assertEqual(self.args[1], None) + self.assert_(self.args[2]) + + psycopg2.connect("dbname=foo bar=baz", async=True) + self.assertEqual(self.args[0], 'dbname=foo bar=baz') + self.assertEqual(self.args[1], None) + self.assert_(self.args[2]) + + def test_empty_param(self): + psycopg2.connect(database='sony', password='') + self.assertEqual(self.args[0], "dbname=sony password=''") + + def test_escape(self): + psycopg2.connect(database='hello world') + self.assertEqual(self.args[0], "dbname='hello world'") + + psycopg2.connect(database=r'back\slash') + self.assertEqual(self.args[0], r"dbname=back\\slash") + + psycopg2.connect(database="quo'te") + self.assertEqual(self.args[0], r"dbname=quo\'te") + + psycopg2.connect(database="with\ttab") + 
self.assertEqual(self.args[0], "dbname='with\ttab'") + + psycopg2.connect(database=r"\every thing'") + self.assertEqual(self.args[0], r"dbname='\\every thing\''") + + def test_no_kwargs_swallow(self): + self.assertRaises(TypeError, + psycopg2.connect, 'dbname=foo', database='foo') + self.assertRaises(TypeError, + psycopg2.connect, 'dbname=foo', user='postgres') + self.assertRaises(TypeError, + psycopg2.connect, 'dbname=foo', no_such_param='meh') + + +class ExceptionsTestCase(ConnectingTestCase): + def test_attributes(self): + cur = self.conn.cursor() + try: + cur.execute("select * from nonexist") + except psycopg2.Error, exc: + e = exc + + self.assertEqual(e.pgcode, '42P01') + self.assert_(e.pgerror) + self.assert_(e.cursor is cur) + + def test_diagnostics_attributes(self): + cur = self.conn.cursor() + try: + cur.execute("select * from nonexist") + except psycopg2.Error, exc: + e = exc + + diag = e.diag + self.assert_(isinstance(diag, psycopg2.extensions.Diagnostics)) + for attr in [ + 'column_name', 'constraint_name', 'context', 'datatype_name', + 'internal_position', 'internal_query', 'message_detail', + 'message_hint', 'message_primary', 'schema_name', 'severity', + 'source_file', 'source_function', 'source_line', 'sqlstate', + 'statement_position', 'table_name', ]: + v = getattr(diag, attr) + if v is not None: + self.assert_(isinstance(v, str)) + + def test_diagnostics_values(self): + cur = self.conn.cursor() + try: + cur.execute("select * from nonexist") + except psycopg2.Error, exc: + e = exc + + self.assertEqual(e.diag.sqlstate, '42P01') + self.assertEqual(e.diag.severity, 'ERROR') + + def test_diagnostics_life(self): + import gc + from weakref import ref + + def tmp(): + cur = self.conn.cursor() + try: + cur.execute("select * from nonexist") + except psycopg2.Error, exc: + return cur, exc + + cur, e = tmp() + diag = e.diag + w = ref(cur) + + del e, cur + gc.collect() + assert(w() is not None) + + self.assertEqual(diag.sqlstate, '42P01') + + del diag + 
gc.collect(); gc.collect() + assert(w() is None) + + @skip_copy_if_green + def test_diagnostics_copy(self): + from StringIO import StringIO + f = StringIO() + cur = self.conn.cursor() + try: + cur.copy_to(f, 'nonexist') + except psycopg2.Error, exc: + diag = exc.diag + + self.assertEqual(diag.sqlstate, '42P01') + + def test_diagnostics_independent(self): + cur = self.conn.cursor() + try: + cur.execute("l'acqua e' poca e 'a papera nun galleggia") + except Exception, exc: + diag1 = exc.diag + + self.conn.rollback() + + try: + cur.execute("select level from water where ducks > 1") + except psycopg2.Error, exc: + diag2 = exc.diag + + self.assertEqual(diag1.sqlstate, '42601') + self.assertEqual(diag2.sqlstate, '42P01') + + def test_diagnostics_from_commit(self): + cur = self.conn.cursor() + cur.execute(""" + create temp table test_deferred ( + data int primary key, + ref int references test_deferred (data) + deferrable initially deferred) + """) + cur.execute("insert into test_deferred values (1,2)") + try: + self.conn.commit() + except psycopg2.Error, exc: + e = exc + self.assertEqual(e.diag.sqlstate, '23503') + + @skip_before_postgres(9, 3) + def test_9_3_diagnostics(self): + cur = self.conn.cursor() + cur.execute(""" + create temp table test_exc ( + data int constraint chk_eq1 check (data = 1) + )""") + try: + cur.execute("insert into test_exc values(2)") + except psycopg2.Error, exc: + e = exc + self.assertEqual(e.pgcode, '23514') + self.assertEqual(e.diag.schema_name[:7], "pg_temp") + self.assertEqual(e.diag.table_name, "test_exc") + self.assertEqual(e.diag.column_name, None) + self.assertEqual(e.diag.constraint_name, "chk_eq1") + self.assertEqual(e.diag.datatype_name, None) + + @skip_before_python(2, 5) + def test_pickle(self): + import pickle + cur = self.conn.cursor() + try: + cur.execute("select * from nonexist") + except psycopg2.Error, exc: + e = exc + + e1 = pickle.loads(pickle.dumps(e)) + + self.assertEqual(e.pgerror, e1.pgerror) + 
self.assertEqual(e.pgcode, e1.pgcode) + self.assert_(e1.cursor is None) + + @skip_before_python(2, 5) + def test_pickle_connection_error(self): + # segfaults on psycopg 2.5.1 - see ticket #170 + import pickle + try: + psycopg2.connect('dbname=nosuchdatabasemate') + except psycopg2.Error, exc: + e = exc + + e1 = pickle.loads(pickle.dumps(e)) + + self.assertEqual(e.pgerror, e1.pgerror) + self.assertEqual(e.pgcode, e1.pgcode) + self.assert_(e1.cursor is None) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_notify.py b/psycopg2/tests/test_notify.py new file mode 100644 index 0000000..d048241 --- /dev/null +++ b/psycopg2/tests/test_notify.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python + +# test_notify.py - unit test for async notifications +# +# Copyright (C) 2010-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +from testutils import unittest + +import psycopg2 +from psycopg2 import extensions +from testutils import ConnectingTestCase, script_to_py3 +from testconfig import dsn + +import sys +import time +import select +from subprocess import Popen, PIPE + + +class NotifiesTests(ConnectingTestCase): + + def autocommit(self, conn): + """Set a connection in autocommit mode.""" + conn.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT) + + def listen(self, name): + """Start listening for a name on self.conn.""" + curs = self.conn.cursor() + curs.execute("LISTEN " + name) + curs.close() + + def notify(self, name, sec=0, payload=None): + """Send a notification to the database, eventually after some time.""" + if payload is None: + payload = '' + else: + payload = ", %r" % payload + + script = ("""\ +import time +time.sleep(%(sec)s) +import psycopg2 +import psycopg2.extensions +conn = psycopg2.connect(%(dsn)r) +conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) +print conn.get_backend_pid() +curs = conn.cursor() +curs.execute("NOTIFY " %(name)r %(payload)r) +curs.close() +conn.close() +""" + % { 'dsn': dsn, 'sec': sec, 'name': name, 'payload': payload}) + + return Popen([sys.executable, '-c', script_to_py3(script)], stdout=PIPE) + + def test_notifies_received_on_poll(self): + self.autocommit(self.conn) + self.listen('foo') + + proc = self.notify('foo', 1) + + t0 = time.time() + ready = select.select([self.conn], [], [], 5) + t1 = time.time() + self.assert_(0.99 < t1 - t0 < 4, t1 - t0) + + pid = int(proc.communicate()[0]) + self.assertEqual(0, len(self.conn.notifies)) + self.assertEqual(extensions.POLL_OK, self.conn.poll()) + self.assertEqual(1, len(self.conn.notifies)) + self.assertEqual(pid, self.conn.notifies[0][0]) + self.assertEqual('foo', self.conn.notifies[0][1]) + + def test_many_notifies(self): + self.autocommit(self.conn) + for name in ['foo', 'bar', 'baz']: + self.listen(name) + + pids = {} + for name in ['foo', 'bar', 'baz', 'qux']: + 
pids[name] = int(self.notify(name).communicate()[0]) + + self.assertEqual(0, len(self.conn.notifies)) + for i in range(10): + self.assertEqual(extensions.POLL_OK, self.conn.poll()) + self.assertEqual(3, len(self.conn.notifies)) + + names = dict.fromkeys(['foo', 'bar', 'baz']) + for (pid, name) in self.conn.notifies: + self.assertEqual(pids[name], pid) + names.pop(name) # raise if name found twice + + def test_notifies_received_on_execute(self): + self.autocommit(self.conn) + self.listen('foo') + pid = int(self.notify('foo').communicate()[0]) + self.assertEqual(0, len(self.conn.notifies)) + self.conn.cursor().execute('select 1;') + self.assertEqual(1, len(self.conn.notifies)) + self.assertEqual(pid, self.conn.notifies[0][0]) + self.assertEqual('foo', self.conn.notifies[0][1]) + + def test_notify_object(self): + self.autocommit(self.conn) + self.listen('foo') + self.notify('foo').communicate() + time.sleep(0.5) + self.conn.poll() + notify = self.conn.notifies[0] + self.assert_(isinstance(notify, psycopg2.extensions.Notify)) + + def test_notify_attributes(self): + self.autocommit(self.conn) + self.listen('foo') + pid = int(self.notify('foo').communicate()[0]) + time.sleep(0.5) + self.conn.poll() + self.assertEqual(1, len(self.conn.notifies)) + notify = self.conn.notifies[0] + self.assertEqual(pid, notify.pid) + self.assertEqual('foo', notify.channel) + self.assertEqual('', notify.payload) + + def test_notify_payload(self): + if self.conn.server_version < 90000: + return self.skipTest("server version %s doesn't support notify payload" + % self.conn.server_version) + self.autocommit(self.conn) + self.listen('foo') + pid = int(self.notify('foo', payload="Hello, world!").communicate()[0]) + time.sleep(0.5) + self.conn.poll() + self.assertEqual(1, len(self.conn.notifies)) + notify = self.conn.notifies[0] + self.assertEqual(pid, notify.pid) + self.assertEqual('foo', notify.channel) + self.assertEqual('Hello, world!', notify.payload) + + def test_notify_init(self): + n = 
psycopg2.extensions.Notify(10, 'foo') + self.assertEqual(10, n.pid) + self.assertEqual('foo', n.channel) + self.assertEqual('', n.payload) + (pid, channel) = n + self.assertEqual((pid, channel), (10, 'foo')) + + n = psycopg2.extensions.Notify(42, 'bar', 'baz') + self.assertEqual(42, n.pid) + self.assertEqual('bar', n.channel) + self.assertEqual('baz', n.payload) + (pid, channel) = n + self.assertEqual((pid, channel), (42, 'bar')) + + def test_compare(self): + data = [(10, 'foo'), (20, 'foo'), (10, 'foo', 'bar'), (10, 'foo', 'baz')] + for d1 in data: + for d2 in data: + n1 = psycopg2.extensions.Notify(*d1) + n2 = psycopg2.extensions.Notify(*d2) + self.assertEqual((n1 == n2), (d1 == d2)) + self.assertEqual((n1 != n2), (d1 != d2)) + + def test_compare_tuple(self): + from psycopg2.extensions import Notify + self.assertEqual((10, 'foo'), Notify(10, 'foo')) + self.assertEqual((10, 'foo'), Notify(10, 'foo', 'bar')) + self.assertNotEqual((10, 'foo'), Notify(20, 'foo')) + self.assertNotEqual((10, 'foo'), Notify(10, 'bar')) + + def test_hash(self): + from psycopg2.extensions import Notify + self.assertEqual(hash((10, 'foo')), hash(Notify(10, 'foo'))) + self.assertNotEqual(hash(Notify(10, 'foo', 'bar')), + hash(Notify(10, 'foo'))) + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() + diff --git a/psycopg2/tests/test_psycopg2_dbapi20.py b/psycopg2/tests/test_psycopg2_dbapi20.py new file mode 100644 index 0000000..744d322 --- /dev/null +++ b/psycopg2/tests/test_psycopg2_dbapi20.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +# test_psycopg2_dbapi20.py - DB API conformance test for psycopg2 +# +# Copyright (C) 2006-2011 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import dbapi20 +import dbapi20_tpc +from testutils import skip_if_tpc_disabled +from testutils import unittest, decorate_all_tests +import psycopg2 + +from testconfig import dsn + +class Psycopg2Tests(dbapi20.DatabaseAPI20Test): + driver = psycopg2 + connect_args = () + connect_kw_args = {'dsn': dsn} + + lower_func = 'lower' # For stored procedure test + + def test_setoutputsize(self): + # psycopg2's setoutputsize() is a no-op + pass + + def test_nextset(self): + # psycopg2 does not implement nextset() + pass + + +class Psycopg2TPCTests(dbapi20_tpc.TwoPhaseCommitTests, unittest.TestCase): + driver = psycopg2 + + def connect(self): + return psycopg2.connect(dsn=dsn) + +decorate_all_tests(Psycopg2TPCTests, skip_if_tpc_disabled) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main() diff --git a/psycopg2/tests/test_quote.py b/psycopg2/tests/test_quote.py new file mode 100644 index 0000000..e7b3c31 --- /dev/null +++ b/psycopg2/tests/test_quote.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python + +# test_quote.py - unit test for strings quoting +# +# Copyright (C) 2007-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, 
either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import sys +from testutils import unittest, ConnectingTestCase + +import psycopg2 +import psycopg2.extensions +from psycopg2.extensions import b + +class QuotingTestCase(ConnectingTestCase): + r"""Checks the correct quoting of strings and binary objects. + + Since ver. 8.1, PostgreSQL is moving towards SQL standard conforming + strings, where the backslash (\) is treated as literal character, + not as escape. To treat the backslash as a C-style escapes, PG supports + the E'' quotes. + + This test case checks that the E'' quotes are used whenever they are + needed. The tests are expected to pass with all PostgreSQL server versions + (currently tested with 7.4 <= PG <= 8.3beta) and with any + 'standard_conforming_strings' server parameter value. + The tests also check that no warning is raised ('escape_string_warning' + should be on). + + http://www.postgresql.org/docs/current/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS + http://www.postgresql.org/docs/current/static/runtime-config-compatible.html + """ + def test_string(self): + data = """some data with \t chars + to escape into, 'quotes' and \\ a backslash too. 
+ """ + data += "".join(map(chr, range(1,127))) + + curs = self.conn.cursor() + curs.execute("SELECT %s;", (data,)) + res = curs.fetchone()[0] + + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + def test_binary(self): + data = b("""some data with \000\013 binary + stuff into, 'quotes' and \\ a backslash too. + """) + if sys.version_info[0] < 3: + data += "".join(map(chr, range(256))) + else: + data += bytes(range(256)) + + curs = self.conn.cursor() + curs.execute("SELECT %s::bytea;", (psycopg2.Binary(data),)) + if sys.version_info[0] < 3: + res = str(curs.fetchone()[0]) + else: + res = curs.fetchone()[0].tobytes() + + if res[0] in (b('x'), ord(b('x'))) and self.conn.server_version >= 90000: + return self.skipTest( + "bytea broken with server >= 9.0, libpq < 9") + + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + def test_unicode(self): + curs = self.conn.cursor() + curs.execute("SHOW server_encoding") + server_encoding = curs.fetchone()[0] + if server_encoding != "UTF8": + return self.skipTest( + "Unicode test skipped since server encoding is %s" + % server_encoding) + + data = u"""some data with \t chars + to escape into, 'quotes', \u20ac euro sign and \\ a backslash too. 
+ """ + data += u"".join(map(unichr, [ u for u in range(1,65536) + if not 0xD800 <= u <= 0xDFFF ])) # surrogate area + self.conn.set_client_encoding('UNICODE') + + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn) + curs.execute("SELECT %s::text;", (data,)) + res = curs.fetchone()[0] + + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + def test_latin1(self): + self.conn.set_client_encoding('LATIN1') + curs = self.conn.cursor() + if sys.version_info[0] < 3: + data = ''.join(map(chr, range(32, 127) + range(160, 256))) + else: + data = bytes(range(32, 127) + range(160, 256)).decode('latin1') + + # as string + curs.execute("SELECT %s::text;", (data,)) + res = curs.fetchone()[0] + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + # as unicode + if sys.version_info[0] < 3: + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn) + data = data.decode('latin1') + + curs.execute("SELECT %s::text;", (data,)) + res = curs.fetchone()[0] + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + def test_koi8(self): + self.conn.set_client_encoding('KOI8') + curs = self.conn.cursor() + if sys.version_info[0] < 3: + data = ''.join(map(chr, range(32, 127) + range(128, 256))) + else: + data = bytes(range(32, 127) + range(128, 256)).decode('koi8_r') + + # as string + curs.execute("SELECT %s::text;", (data,)) + res = curs.fetchone()[0] + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + # as unicode + if sys.version_info[0] < 3: + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn) + data = data.decode('koi8_r') + + curs.execute("SELECT %s::text;", (data,)) + res = curs.fetchone()[0] + self.assertEqual(res, data) + self.assert_(not self.conn.notices) + + +class TestQuotedString(ConnectingTestCase): + def test_encoding(self): + q = psycopg2.extensions.QuotedString('hi') + self.assertEqual(q.encoding, 'latin1') + + 
self.conn.set_client_encoding('utf_8') + q.prepare(self.conn) + self.assertEqual(q.encoding, 'utf_8') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() + diff --git a/psycopg2/tests/test_transaction.py b/psycopg2/tests/test_transaction.py new file mode 100644 index 0000000..724d0d8 --- /dev/null +++ b/psycopg2/tests/test_transaction.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python + +# test_transaction - unit test on transaction behaviour +# +# Copyright (C) 2007-2011 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import threading +from testutils import unittest, ConnectingTestCase, skip_before_postgres + +import psycopg2 +from psycopg2.extensions import ( + ISOLATION_LEVEL_SERIALIZABLE, STATUS_BEGIN, STATUS_READY) + +class TransactionTests(ConnectingTestCase): + + def setUp(self): + ConnectingTestCase.setUp(self) + self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE) + curs = self.conn.cursor() + curs.execute(''' + CREATE TEMPORARY TABLE table1 ( + id int PRIMARY KEY + )''') + # The constraint is set to deferrable for the commit_failed test + curs.execute(''' + CREATE TEMPORARY TABLE table2 ( + id int PRIMARY KEY, + table1_id int, + CONSTRAINT table2__table1_id__fk + FOREIGN KEY (table1_id) REFERENCES table1(id) DEFERRABLE)''') + curs.execute('INSERT INTO table1 VALUES (1)') + curs.execute('INSERT INTO table2 VALUES (1, 1)') + self.conn.commit() + + def test_rollback(self): + # Test that rollback undoes changes + curs = self.conn.cursor() + curs.execute('INSERT INTO table2 VALUES (2, 1)') + # Rollback takes us from BEGIN state to READY state + self.assertEqual(self.conn.status, STATUS_BEGIN) + self.conn.rollback() + self.assertEqual(self.conn.status, STATUS_READY) + curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2') + self.assertEqual(curs.fetchall(), []) + + def test_commit(self): + # Test that commit stores changes + curs = self.conn.cursor() + curs.execute('INSERT INTO table2 VALUES (2, 1)') + # Rollback takes us from BEGIN state to READY state + self.assertEqual(self.conn.status, STATUS_BEGIN) + self.conn.commit() + self.assertEqual(self.conn.status, STATUS_READY) + # Now rollback and show that the new record is still there: + self.conn.rollback() + curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2') + self.assertEqual(curs.fetchall(), [(2, 1)]) + + def test_failed_commit(self): + # Test that we can recover from a failed commit. + # We use a deferred constraint to cause a failure on commit. 
+ curs = self.conn.cursor() + curs.execute('SET CONSTRAINTS table2__table1_id__fk DEFERRED') + curs.execute('INSERT INTO table2 VALUES (2, 42)') + # The commit should fail, and move the cursor back to READY state + self.assertEqual(self.conn.status, STATUS_BEGIN) + self.assertRaises(psycopg2.IntegrityError, self.conn.commit) + self.assertEqual(self.conn.status, STATUS_READY) + # The connection should be ready to use for the next transaction: + curs.execute('SELECT 1') + self.assertEqual(curs.fetchone()[0], 1) + + +class DeadlockSerializationTests(ConnectingTestCase): + """Test deadlock and serialization failure errors.""" + + def connect(self): + conn = ConnectingTestCase.connect(self) + conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE) + return conn + + def setUp(self): + ConnectingTestCase.setUp(self) + + curs = self.conn.cursor() + # Drop table if it already exists + try: + curs.execute("DROP TABLE table1") + self.conn.commit() + except psycopg2.DatabaseError: + self.conn.rollback() + try: + curs.execute("DROP TABLE table2") + self.conn.commit() + except psycopg2.DatabaseError: + self.conn.rollback() + # Create sample data + curs.execute(""" + CREATE TABLE table1 ( + id int PRIMARY KEY, + name text) + """) + curs.execute("INSERT INTO table1 VALUES (1, 'hello')") + curs.execute("CREATE TABLE table2 (id int PRIMARY KEY)") + self.conn.commit() + + def tearDown(self): + curs = self.conn.cursor() + curs.execute("DROP TABLE table1") + curs.execute("DROP TABLE table2") + self.conn.commit() + + ConnectingTestCase.tearDown(self) + + def test_deadlock(self): + self.thread1_error = self.thread2_error = None + step1 = threading.Event() + step2 = threading.Event() + + def task1(): + try: + conn = self.connect() + curs = conn.cursor() + curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE") + step1.set() + step2.wait() + curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE") + except psycopg2.DatabaseError, exc: + self.thread1_error = exc + step1.set() + conn.close() + def 
task2(): + try: + conn = self.connect() + curs = conn.cursor() + step1.wait() + curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE") + step2.set() + curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE") + except psycopg2.DatabaseError, exc: + self.thread2_error = exc + step2.set() + conn.close() + + # Run the threads in parallel. The "step1" and "step2" events + # ensure that the two transactions overlap. + thread1 = threading.Thread(target=task1) + thread2 = threading.Thread(target=task2) + thread1.start() + thread2.start() + thread1.join() + thread2.join() + + # Exactly one of the threads should have failed with + # TransactionRollbackError: + self.assertFalse(self.thread1_error and self.thread2_error) + error = self.thread1_error or self.thread2_error + self.assertTrue(isinstance( + error, psycopg2.extensions.TransactionRollbackError)) + + def test_serialisation_failure(self): + self.thread1_error = self.thread2_error = None + step1 = threading.Event() + step2 = threading.Event() + + def task1(): + try: + conn = self.connect() + curs = conn.cursor() + curs.execute("SELECT name FROM table1 WHERE id = 1") + curs.fetchall() + step1.set() + step2.wait() + curs.execute("UPDATE table1 SET name='task1' WHERE id = 1") + conn.commit() + except psycopg2.DatabaseError, exc: + self.thread1_error = exc + step1.set() + conn.close() + def task2(): + try: + conn = self.connect() + curs = conn.cursor() + step1.wait() + curs.execute("UPDATE table1 SET name='task2' WHERE id = 1") + conn.commit() + except psycopg2.DatabaseError, exc: + self.thread2_error = exc + step2.set() + conn.close() + + # Run the threads in parallel. The "step1" and "step2" events + # ensure that the two transactions overlap. 
+ thread1 = threading.Thread(target=task1) + thread2 = threading.Thread(target=task2) + thread1.start() + thread2.start() + thread1.join() + thread2.join() + + # Exactly one of the threads should have failed with + # TransactionRollbackError: + self.assertFalse(self.thread1_error and self.thread2_error) + error = self.thread1_error or self.thread2_error + self.assertTrue(isinstance( + error, psycopg2.extensions.TransactionRollbackError)) + + +class QueryCancellationTests(ConnectingTestCase): + """Tests for query cancellation.""" + + def setUp(self): + ConnectingTestCase.setUp(self) + self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE) + + @skip_before_postgres(8, 2) + def test_statement_timeout(self): + curs = self.conn.cursor() + # Set a low statement timeout, then sleep for a longer period. + curs.execute('SET statement_timeout TO 10') + self.assertRaises(psycopg2.extensions.QueryCanceledError, + curs.execute, 'SELECT pg_sleep(50)') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/test_types_basic.py b/psycopg2/tests/test_types_basic.py new file mode 100644 index 0000000..6c4cc97 --- /dev/null +++ b/psycopg2/tests/test_types_basic.py @@ -0,0 +1,467 @@ +#!/usr/bin/env python +# +# types_basic.py - tests for basic types conversions +# +# Copyright (C) 2004-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. 
+# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import decimal + +import sys +from functools import wraps +import testutils +from testutils import unittest, ConnectingTestCase, decorate_all_tests + +import psycopg2 +from psycopg2.extensions import b + + +class TypesBasicTests(ConnectingTestCase): + """Test that all type conversions are working.""" + + def execute(self, *args): + curs = self.conn.cursor() + curs.execute(*args) + return curs.fetchone()[0] + + def testQuoting(self): + s = "Quote'this\\! ''ok?''" + self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s, + "wrong quoting: " + s) + + def testUnicode(self): + s = u"Quote'this\\! ''ok?''" + self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s, + "wrong unicode quoting: " + s) + + def testNumber(self): + s = self.execute("SELECT %s AS foo", (1971,)) + self.failUnless(s == 1971, "wrong integer quoting: " + str(s)) + s = self.execute("SELECT %s AS foo", (1971L,)) + self.failUnless(s == 1971L, "wrong integer quoting: " + str(s)) + + def testBoolean(self): + x = self.execute("SELECT %s as foo", (False,)) + self.assert_(x is False) + x = self.execute("SELECT %s as foo", (True,)) + self.assert_(x is True) + + def testDecimal(self): + s = self.execute("SELECT %s AS foo", (decimal.Decimal("19.10"),)) + self.failUnless(s - decimal.Decimal("19.10") == 0, + "wrong decimal quoting: " + str(s)) + s = self.execute("SELECT %s AS foo", (decimal.Decimal("NaN"),)) + self.failUnless(str(s) == "NaN", "wrong decimal quoting: " + str(s)) + self.failUnless(type(s) == decimal.Decimal, "wrong decimal conversion: " + repr(s)) + s = self.execute("SELECT %s AS foo", (decimal.Decimal("infinity"),)) + 
self.failUnless(str(s) == "NaN", "wrong decimal quoting: " + str(s)) + self.failUnless(type(s) == decimal.Decimal, "wrong decimal conversion: " + repr(s)) + s = self.execute("SELECT %s AS foo", (decimal.Decimal("-infinity"),)) + self.failUnless(str(s) == "NaN", "wrong decimal quoting: " + str(s)) + self.failUnless(type(s) == decimal.Decimal, "wrong decimal conversion: " + repr(s)) + + def testFloatNan(self): + try: + float("nan") + except ValueError: + return self.skipTest("nan not available on this platform") + + s = self.execute("SELECT %s AS foo", (float("nan"),)) + self.failUnless(str(s) == "nan", "wrong float quoting: " + str(s)) + self.failUnless(type(s) == float, "wrong float conversion: " + repr(s)) + + def testFloatInf(self): + try: + self.execute("select 'inf'::float") + except psycopg2.DataError: + return self.skipTest("inf::float not available on the server") + except ValueError: + return self.skipTest("inf not available on this platform") + s = self.execute("SELECT %s AS foo", (float("inf"),)) + self.failUnless(str(s) == "inf", "wrong float quoting: " + str(s)) + self.failUnless(type(s) == float, "wrong float conversion: " + repr(s)) + + s = self.execute("SELECT %s AS foo", (float("-inf"),)) + self.failUnless(str(s) == "-inf", "wrong float quoting: " + str(s)) + + def testBinary(self): + if sys.version_info[0] < 3: + s = ''.join([chr(x) for x in range(256)]) + b = psycopg2.Binary(s) + buf = self.execute("SELECT %s::bytea AS foo", (b,)) + self.assertEqual(s, str(buf)) + else: + s = bytes(range(256)) + b = psycopg2.Binary(s) + buf = self.execute("SELECT %s::bytea AS foo", (b,)) + self.assertEqual(s, buf.tobytes()) + + def testBinaryNone(self): + b = psycopg2.Binary(None) + buf = self.execute("SELECT %s::bytea AS foo", (b,)) + self.assertEqual(buf, None) + + def testBinaryEmptyString(self): + # test to make sure an empty Binary is converted to an empty string + if sys.version_info[0] < 3: + b = psycopg2.Binary('') + self.assertEqual(str(b), "''::bytea") + 
else: + b = psycopg2.Binary(bytes([])) + self.assertEqual(str(b), "''::bytea") + + def testBinaryRoundTrip(self): + # test to make sure buffers returned by psycopg2 are + # understood by execute: + if sys.version_info[0] < 3: + s = ''.join([chr(x) for x in range(256)]) + buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),)) + buf2 = self.execute("SELECT %s::bytea AS foo", (buf,)) + self.assertEqual(s, str(buf2)) + else: + s = bytes(range(256)) + buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),)) + buf2 = self.execute("SELECT %s::bytea AS foo", (buf,)) + self.assertEqual(s, buf2.tobytes()) + + def testArray(self): + s = self.execute("SELECT %s AS foo", ([[1,2],[3,4]],)) + self.failUnlessEqual(s, [[1,2],[3,4]]) + s = self.execute("SELECT %s AS foo", (['one', 'two', 'three'],)) + self.failUnlessEqual(s, ['one', 'two', 'three']) + + def testEmptyArrayRegression(self): + # ticket #42 + import datetime + curs = self.conn.cursor() + curs.execute("create table array_test (id integer, col timestamp without time zone[])") + + curs.execute("insert into array_test values (%s, %s)", (1, [datetime.date(2011,2,14)])) + curs.execute("select col from array_test where id = 1") + self.assertEqual(curs.fetchone()[0], [datetime.datetime(2011, 2, 14, 0, 0)]) + + curs.execute("insert into array_test values (%s, %s)", (2, [])) + curs.execute("select col from array_test where id = 2") + self.assertEqual(curs.fetchone()[0], []) + + def testEmptyArray(self): + s = self.execute("SELECT '{}' AS foo") + self.failUnlessEqual(s, []) + s = self.execute("SELECT '{}'::text[] AS foo") + self.failUnlessEqual(s, []) + s = self.execute("SELECT %s AS foo", ([],)) + self.failUnlessEqual(s, []) + s = self.execute("SELECT 1 != ALL(%s)", ([],)) + self.failUnlessEqual(s, True) + # but don't break the strings :) + s = self.execute("SELECT '{}'::text AS foo") + self.failUnlessEqual(s, "{}") + + def testArrayEscape(self): + ss = ['', '\\', '"', '\\\\', '\\"'] + for s in ss: + r = 
self.execute("SELECT %s AS foo", (s,)) + self.failUnlessEqual(s, r) + r = self.execute("SELECT %s AS foo", ([s],)) + self.failUnlessEqual([s], r) + + r = self.execute("SELECT %s AS foo", (ss,)) + self.failUnlessEqual(ss, r) + + def testArrayMalformed(self): + curs = self.conn.cursor() + ss = ['', '{', '{}}', '{' * 20 + '}' * 20] + for s in ss: + self.assertRaises(psycopg2.DataError, + psycopg2.extensions.STRINGARRAY, b(s), curs) + + @testutils.skip_from_python(3) + def testTypeRoundtripBuffer(self): + o1 = buffer("".join(map(chr, range(256)))) + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1), type(o2)) + + # Test with an empty buffer + o1 = buffer("") + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1), type(o2)) + self.assertEqual(str(o1), str(o2)) + + @testutils.skip_from_python(3) + def testTypeRoundtripBufferArray(self): + o1 = buffer("".join(map(chr, range(256)))) + o1 = [o1] + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(type(o1[0]), type(o2[0])) + self.assertEqual(str(o1[0]), str(o2[0])) + + @testutils.skip_before_python(3) + def testTypeRoundtripBytes(self): + o1 = bytes(range(256)) + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(memoryview, type(o2)) + + # Test with an empty buffer + o1 = bytes([]) + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(memoryview, type(o2)) + + @testutils.skip_before_python(3) + def testTypeRoundtripBytesArray(self): + o1 = bytes(range(256)) + o1 = [o1] + o2 = self.execute("select %s;", (o1,)) + self.assertEqual(memoryview, type(o2[0])) + + @testutils.skip_before_python(2, 6) + def testAdaptBytearray(self): + o1 = bytearray(range(256)) + o2 = self.execute("select %s;", (o1,)) + + if sys.version_info[0] < 3: + self.assertEqual(buffer, type(o2)) + else: + self.assertEqual(memoryview, type(o2)) + + self.assertEqual(len(o1), len(o2)) + for c1, c2 in zip(o1, o2): + self.assertEqual(c1, ord(c2)) + + # Test with an empty buffer + o1 = bytearray([]) + o2 = 
self.execute("select %s;", (o1,)) + + self.assertEqual(len(o2), 0) + if sys.version_info[0] < 3: + self.assertEqual(buffer, type(o2)) + else: + self.assertEqual(memoryview, type(o2)) + + @testutils.skip_before_python(2, 7) + def testAdaptMemoryview(self): + o1 = memoryview(bytearray(range(256))) + o2 = self.execute("select %s;", (o1,)) + if sys.version_info[0] < 3: + self.assertEqual(buffer, type(o2)) + else: + self.assertEqual(memoryview, type(o2)) + + # Test with an empty buffer + o1 = memoryview(bytearray([])) + o2 = self.execute("select %s;", (o1,)) + if sys.version_info[0] < 3: + self.assertEqual(buffer, type(o2)) + else: + self.assertEqual(memoryview, type(o2)) + + def testByteaHexCheckFalsePositive(self): + # the check \x -> x to detect bad bytea decode + # may be fooled if the first char is really an 'x' + o1 = psycopg2.Binary(b('x')) + o2 = self.execute("SELECT %s::bytea AS foo", (o1,)) + self.assertEqual(b('x'), o2[0]) + + def testNegNumber(self): + d1 = self.execute("select -%s;", (decimal.Decimal('-1.0'),)) + self.assertEqual(1, d1) + f1 = self.execute("select -%s;", (-1.0,)) + self.assertEqual(1, f1) + i1 = self.execute("select -%s;", (-1,)) + self.assertEqual(1, i1) + l1 = self.execute("select -%s;", (-1L,)) + self.assertEqual(1, l1) + + def testGenericArray(self): + a = self.execute("select '{1,2,3}'::int4[]") + self.assertEqual(a, [1,2,3]) + a = self.execute("select array['a','b','''']::text[]") + self.assertEqual(a, ['a','b',"'"]) + + @testutils.skip_before_postgres(8, 2) + def testGenericArrayNull(self): + def caster(s, cur): + if s is None: return "nada" + return int(s) * 2 + base = psycopg2.extensions.new_type((23,), "INT4", caster) + array = psycopg2.extensions.new_array_type((1007,), "INT4ARRAY", base) + + psycopg2.extensions.register_type(array, self.conn) + a = self.execute("select '{1,2,3}'::int4[]") + self.assertEqual(a, [2,4,6]) + a = self.execute("select '{1,2,NULL}'::int4[]") + self.assertEqual(a, [2,4,'nada']) + + +class 
AdaptSubclassTest(unittest.TestCase): + def test_adapt_subtype(self): + from psycopg2.extensions import adapt + class Sub(str): pass + s1 = "hel'lo" + s2 = Sub(s1) + self.assertEqual(adapt(s1).getquoted(), adapt(s2).getquoted()) + + def test_adapt_most_specific(self): + from psycopg2.extensions import adapt, register_adapter, AsIs + + class A(object): pass + class B(A): pass + class C(B): pass + + register_adapter(A, lambda a: AsIs("a")) + register_adapter(B, lambda b: AsIs("b")) + try: + self.assertEqual(b('b'), adapt(C()).getquoted()) + finally: + del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote] + del psycopg2.extensions.adapters[B, psycopg2.extensions.ISQLQuote] + + @testutils.skip_from_python(3) + def test_no_mro_no_joy(self): + from psycopg2.extensions import adapt, register_adapter, AsIs + + class A: pass + class B(A): pass + + register_adapter(A, lambda a: AsIs("a")) + try: + self.assertRaises(psycopg2.ProgrammingError, adapt, B()) + finally: + del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote] + + + @testutils.skip_before_python(3) + def test_adapt_subtype_3(self): + from psycopg2.extensions import adapt, register_adapter, AsIs + + class A: pass + class B(A): pass + + register_adapter(A, lambda a: AsIs("a")) + try: + self.assertEqual(b("a"), adapt(B()).getquoted()) + finally: + del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote] + + +class ByteaParserTest(unittest.TestCase): + """Unit test for our bytea format parser.""" + def setUp(self): + try: + self._cast = self._import_cast() + except Exception, e: + self._cast = None + self._exc = e + + def _import_cast(self): + """Use ctypes to access the C function. + + Raise any sort of error: we just support this where ctypes works as + expected. 
+ """ + import ctypes + lib = ctypes.cdll.LoadLibrary(psycopg2._psycopg.__file__) + cast = lib.typecast_BINARY_cast + cast.argtypes = [ctypes.c_char_p, ctypes.c_size_t, ctypes.py_object] + cast.restype = ctypes.py_object + return cast + + def cast(self, buffer): + """Cast a buffer from the output format""" + l = buffer and len(buffer) or 0 + rv = self._cast(buffer, l, None) + + if rv is None: + return None + + if sys.version_info[0] < 3: + return str(rv) + else: + return rv.tobytes() + + def test_null(self): + rv = self.cast(None) + self.assertEqual(rv, None) + + def test_blank(self): + rv = self.cast(b('')) + self.assertEqual(rv, b('')) + + def test_blank_hex(self): + # Reported as problematic in ticket #48 + rv = self.cast(b('\\x')) + self.assertEqual(rv, b('')) + + def test_full_hex(self, upper=False): + buf = ''.join(("%02x" % i) for i in range(256)) + if upper: buf = buf.upper() + buf = '\\x' + buf + rv = self.cast(b(buf)) + if sys.version_info[0] < 3: + self.assertEqual(rv, ''.join(map(chr, range(256)))) + else: + self.assertEqual(rv, bytes(range(256))) + + def test_full_hex_upper(self): + return self.test_full_hex(upper=True) + + def test_full_escaped_octal(self): + buf = ''.join(("\\%03o" % i) for i in range(256)) + rv = self.cast(b(buf)) + if sys.version_info[0] < 3: + self.assertEqual(rv, ''.join(map(chr, range(256)))) + else: + self.assertEqual(rv, bytes(range(256))) + + def test_escaped_mixed(self): + import string + buf = ''.join(("\\%03o" % i) for i in range(32)) + buf += string.ascii_letters + buf += ''.join('\\' + c for c in string.ascii_letters) + buf += '\\\\' + rv = self.cast(b(buf)) + if sys.version_info[0] < 3: + tgt = ''.join(map(chr, range(32))) \ + + string.ascii_letters * 2 + '\\' + else: + tgt = bytes(range(32)) + \ + (string.ascii_letters * 2 + '\\').encode('ascii') + + self.assertEqual(rv, tgt) + +def skip_if_cant_cast(f): + @wraps(f) + def skip_if_cant_cast_(self, *args, **kwargs): + if self._cast is None: + return self.skipTest("can't 
test bytea parser: %s - %s" + % (self._exc.__class__.__name__, self._exc)) + + return f(self, *args, **kwargs) + + return skip_if_cant_cast_ + +decorate_all_tests(ByteaParserTest, skip_if_cant_cast) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() + diff --git a/psycopg2/tests/test_types_extras.py b/psycopg2/tests/test_types_extras.py new file mode 100644 index 0000000..96ffcd3 --- /dev/null +++ b/psycopg2/tests/test_types_extras.py @@ -0,0 +1,1562 @@ +#!/usr/bin/env python +# +# types_extras.py - tests for extras types conversions +# +# Copyright (C) 2008-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import re +import sys +from decimal import Decimal +from datetime import date, datetime +from functools import wraps + +from testutils import unittest, skip_if_no_uuid, skip_before_postgres +from testutils import ConnectingTestCase, decorate_all_tests + +import psycopg2 +import psycopg2.extras +from psycopg2.extensions import b + + +def filter_scs(conn, s): + if conn.get_parameter_status("standard_conforming_strings") == 'off': + return s + else: + return s.replace(b("E'"), b("'")) + +class TypesExtrasTests(ConnectingTestCase): + """Test that all type conversions are working.""" + + def execute(self, *args): + curs = self.conn.cursor() + curs.execute(*args) + return curs.fetchone()[0] + + @skip_if_no_uuid + def testUUID(self): + import uuid + psycopg2.extras.register_uuid() + u = uuid.UUID('9c6d5a77-7256-457e-9461-347b4358e350') + s = self.execute("SELECT %s AS foo", (u,)) + self.failUnless(u == s) + # must survive NULL cast to a uuid + s = self.execute("SELECT NULL::uuid AS foo") + self.failUnless(s is None) + + @skip_if_no_uuid + def testUUIDARRAY(self): + import uuid + psycopg2.extras.register_uuid() + u = [uuid.UUID('9c6d5a77-7256-457e-9461-347b4358e350'), uuid.UUID('9c6d5a77-7256-457e-9461-347b4358e352')] + s = self.execute("SELECT %s AS foo", (u,)) + self.failUnless(u == s) + # array with a NULL element + u = [uuid.UUID('9c6d5a77-7256-457e-9461-347b4358e350'), None] + s = self.execute("SELECT %s AS foo", (u,)) + self.failUnless(u == s) + # must survive NULL cast to a uuid[] + s = self.execute("SELECT NULL::uuid[] AS foo") + self.failUnless(s is None) + # what about empty arrays? 
+        s = self.execute("SELECT '{}'::uuid[] AS foo")
+        self.failUnless(type(s) == list and len(s) == 0)
+
+    def testINET(self):
+        # Round-trip an Inet wrapper through the inet adapter/typecaster.
+        psycopg2.extras.register_inet()
+        i = psycopg2.extras.Inet("192.168.1.0/24")
+        s = self.execute("SELECT %s AS foo", (i,))
+        self.failUnless(i.addr == s.addr)
+        # must survive NULL cast to inet
+        s = self.execute("SELECT NULL::inet AS foo")
+        self.failUnless(s is None)
+
+    def testINETARRAY(self):
+        # Same as testINET, but for inet[] arrays.
+        psycopg2.extras.register_inet()
+        i = psycopg2.extras.Inet("192.168.1.0/24")
+        s = self.execute("SELECT %s AS foo", ([i],))
+        self.failUnless(i.addr == s[0].addr)
+        # must survive NULL cast to inet
+        s = self.execute("SELECT NULL::inet[] AS foo")
+        self.failUnless(s is None)
+
+    def test_inet_conform(self):
+        # adapt() on an Inet yields the quoted E'...'::inet literal.
+        from psycopg2.extras import Inet
+        i = Inet("192.168.1.0/24")
+        a = psycopg2.extensions.adapt(i)
+        a.prepare(self.conn)
+        self.assertEqual(
+            filter_scs(self.conn, b("E'192.168.1.0/24'::inet")),
+            a.getquoted())
+
+        # adapts ok with unicode too
+        i = Inet(u"192.168.1.0/24")
+        a = psycopg2.extensions.adapt(i)
+        a.prepare(self.conn)
+        self.assertEqual(
+            filter_scs(self.conn, b("E'192.168.1.0/24'::inet")),
+            a.getquoted())
+
+    def test_adapt_fail(self):
+        # Adapting an unregistered type raises ProgrammingError with a
+        # message naming the offending class.
+        class Foo(object): pass
+        self.assertRaises(psycopg2.ProgrammingError,
+            psycopg2.extensions.adapt, Foo(), psycopg2.extensions.ISQLQuote, None)
+        try:
+            psycopg2.extensions.adapt(Foo(), psycopg2.extensions.ISQLQuote, None)
+        except psycopg2.ProgrammingError, err:
+            self.failUnless(str(err) == "can't adapt type 'Foo'")
+
+
+def skip_if_no_hstore(f):
+    # Decorator: skip the test when the hstore extension is not installed
+    # in the test database (get_oids returns None or an empty oid list).
+    @wraps(f)
+    def skip_if_no_hstore_(self):
+        from psycopg2.extras import HstoreAdapter
+        oids = HstoreAdapter.get_oids(self.conn)
+        if oids is None or not oids[0]:
+            return self.skipTest("hstore not available in test database")
+        return f(self)
+
+    return skip_if_no_hstore_
+
+class HstoreTestCase(ConnectingTestCase):
+    def test_adapt_8(self):
+        # Pre-9.0 servers adapt a dict as ((E'k' => E'v') || ...).
+        if self.conn.server_version >= 90000:
+            return self.skipTest("skipping dict adaptation with PG pre-9 syntax")
+
+        from psycopg2.extras import HstoreAdapter
+
+        o = {'a': '1', 'b': "'", 'c': None}
+        if self.conn.encoding == 'UTF8':
+            o['d'] = u'\xe0'
+
+        a = HstoreAdapter(o)
+        a.prepare(self.conn)
+        q = a.getquoted()
+
+        self.assert_(q.startswith(b("((")), q)
+        ii = q[1:-1].split(b("||"))
+        ii.sort()
+
+        self.assertEqual(len(ii), len(o))
+        self.assertEqual(ii[0], filter_scs(self.conn, b("(E'a' => E'1')")))
+        self.assertEqual(ii[1], filter_scs(self.conn, b("(E'b' => E'''')")))
+        self.assertEqual(ii[2], filter_scs(self.conn, b("(E'c' => NULL)")))
+        if 'd' in o:
+            encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+            self.assertEqual(ii[3], filter_scs(self.conn, b("(E'd' => E'") + encc + b("')")))
+
+    def test_adapt_9(self):
+        # 9.0+ servers adapt a dict as hstore(ARRAY[keys], ARRAY[values]).
+        if self.conn.server_version < 90000:
+            return self.skipTest("skipping dict adaptation with PG 9 syntax")
+
+        from psycopg2.extras import HstoreAdapter
+
+        o = {'a': '1', 'b': "'", 'c': None}
+        if self.conn.encoding == 'UTF8':
+            o['d'] = u'\xe0'
+
+        a = HstoreAdapter(o)
+        a.prepare(self.conn)
+        q = a.getquoted()
+
+        m = re.match(b(r'hstore\(ARRAY\[([^\]]+)\], ARRAY\[([^\]]+)\]\)'), q)
+        self.assert_(m, repr(q))
+
+        kk = m.group(1).split(b(", "))
+        vv = m.group(2).split(b(", "))
+        ii = zip(kk, vv)
+        ii.sort()
+
+        def f(*args):
+            return tuple([filter_scs(self.conn, s) for s in args])
+
+        self.assertEqual(len(ii), len(o))
+        self.assertEqual(ii[0], f(b("E'a'"), b("E'1'")))
+        self.assertEqual(ii[1], f(b("E'b'"), b("E''''")))
+        self.assertEqual(ii[2], f(b("E'c'"), b("NULL")))
+        if 'd' in o:
+            encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
+            self.assertEqual(ii[3], f(b("E'd'"), b("E'") + encc + b("'")))
+
+    def test_parse(self):
+        # Exercise the client-side hstore text parser, including quoting
+        # and backslash-escape corner cases and malformed inputs.
+        from psycopg2.extras import HstoreAdapter
+
+        def ok(s, d):
+            self.assertEqual(HstoreAdapter.parse(s, None), d)
+
+        ok(None, None)
+        ok('', {})
+        ok('"a"=>"1", "b"=>"2"', {'a': '1', 'b': '2'})
+        ok('"a" => "1" ,"b" => "2"', {'a': '1', 'b': '2'})
+        ok('"a"=>NULL, "b"=>"2"', {'a': None, 'b': '2'})
+        ok(r'"a"=>"\"", "\""=>"2"', {'a': '"', '"': '2'})
+        ok('"a"=>"\'", "\'"=>"2"', {'a': "'", "'": '2'})
+        ok('"a"=>"1", "b"=>NULL', {'a': '1', 'b': None})
+        ok(r'"a\\"=>"1"', {'a\\': '1'})
+        ok(r'"a\""=>"1"', {'a"': '1'})
+        ok(r'"a\\\""=>"1"', {r'a\"': '1'})
+        ok(r'"a\\\\\""=>"1"', {r'a\\"': '1'})
+
+        def ko(s):
+            self.assertRaises(psycopg2.InterfaceError,
+                HstoreAdapter.parse, s, None)
+
+        ko('a')
+        ko('"a"')
+        ko(r'"a\\""=>"1"')
+        ko(r'"a\\\\""=>"1"')
+        ko('"a=>"1"')
+        ko('"a"=>"1", "b"=>NUL')
+
+    @skip_if_no_hstore
+    def test_register_conn(self):
+        # Registering on a connection makes the cast available to all its
+        # cursors.
+        from psycopg2.extras import register_hstore
+
+        register_hstore(self.conn)
+        cur = self.conn.cursor()
+        cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
+        t = cur.fetchone()
+        self.assert_(t[0] is None)
+        self.assertEqual(t[1], {})
+        self.assertEqual(t[2], {'a': 'b'})
+
+    @skip_if_no_hstore
+    def test_register_curs(self):
+        # Registering on a single cursor works too.
+        from psycopg2.extras import register_hstore
+
+        cur = self.conn.cursor()
+        register_hstore(cur)
+        cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
+        t = cur.fetchone()
+        self.assert_(t[0] is None)
+        self.assertEqual(t[1], {})
+        self.assertEqual(t[2], {'a': 'b'})
+
+    @skip_if_no_hstore
+    def test_register_unicode(self):
+        # unicode=True returns unicode keys/values instead of str.
+        from psycopg2.extras import register_hstore
+
+        register_hstore(self.conn, unicode=True)
+        cur = self.conn.cursor()
+        cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
+        t = cur.fetchone()
+        self.assert_(t[0] is None)
+        self.assertEqual(t[1], {})
+        self.assertEqual(t[2], {u'a': u'b'})
+        self.assert_(isinstance(t[2].keys()[0], unicode))
+        self.assert_(isinstance(t[2].values()[0], unicode))
+
+    @skip_if_no_hstore
+    def test_register_globally(self):
+        from psycopg2.extras import register_hstore, HstoreAdapter
+
+        oids = HstoreAdapter.get_oids(self.conn)
+        try:
+            register_hstore(self.conn, globally=True)
+            conn2 = self.connect()
+            try:
+                # BUGFIX: the cursor must come from the *second* connection,
+                # otherwise global registration is never actually verified
+                # (and conn2 was created only to be closed, unused).
+                cur2 = conn2.cursor()
+                cur2.execute("select 'a => b'::hstore")
+                r = cur2.fetchone()
+                self.assert_(isinstance(r[0], dict))
+            finally:
+                conn2.close()
+        finally:
+            psycopg2.extensions.string_types.pop(oids[0][0])
+
+        # verify the caster is not around anymore
+        cur = self.conn.cursor()
+        cur.execute("select 'a => b'::hstore")
+        r = cur.fetchone()
+        self.assert_(isinstance(r[0], str))
+
+    @skip_if_no_hstore
+    def test_roundtrip(self):
+        # Round-trip dicts of increasing nastiness through hstore.
+        from psycopg2.extras import register_hstore
+        register_hstore(self.conn)
+        cur = self.conn.cursor()
+
+        def ok(d):
+            cur.execute("select %s", (d,))
+            d1 = cur.fetchone()[0]
+            self.assertEqual(len(d), len(d1))
+            for k in d:
+                self.assert_(k in d1, k)
+                self.assertEqual(d[k], d1[k])
+
+        ok({})
+        ok({'a': 'b', 'c': None})
+
+        ab = map(chr, range(32, 128))
+        ok(dict(zip(ab, ab)))
+        ok({''.join(ab): ''.join(ab)})
+
+        self.conn.set_client_encoding('latin1')
+        if sys.version_info[0] < 3:
+            ab = map(chr, range(32, 127) + range(160, 255))
+        else:
+            # BUGFIX: range objects don't support `+` on Python 3 (the only
+            # interpreter this branch runs on); build the byte string from
+            # concatenated lists instead.
+            ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
+
+        ok({''.join(ab): ''.join(ab)})
+        ok(dict(zip(ab, ab)))
+
+    @skip_if_no_hstore
+    def test_roundtrip_unicode(self):
+        # As test_roundtrip, but with unicode=True casting.
+        from psycopg2.extras import register_hstore
+        register_hstore(self.conn, unicode=True)
+        cur = self.conn.cursor()
+
+        def ok(d):
+            cur.execute("select %s", (d,))
+            d1 = cur.fetchone()[0]
+            self.assertEqual(len(d), len(d1))
+            for k, v in d1.iteritems():
+                self.assert_(k in d, k)
+                self.assertEqual(d[k], v)
+                self.assert_(isinstance(k, unicode))
+                self.assert_(v is None or isinstance(v, unicode))
+
+        ok({})
+        ok({'a': 'b', 'c': None, 'd': u'\u20ac', u'\u2603': 'e'})
+
+        ab = map(unichr, range(1, 1024))
+        ok({u''.join(ab): u''.join(ab)})
+        ok(dict(zip(ab, ab)))
+
+    @skip_if_no_hstore
+    def test_oid(self):
+        cur = self.conn.cursor()
+        cur.execute("select 'hstore'::regtype::oid")
+        oid = cur.fetchone()[0]
+
+        # Note: None as conn_or_cursor is just for testing: not public
+        # interface and it may break in future.
+
+        from psycopg2.extras import register_hstore
+        register_hstore(None, globally=True, oid=oid)
+        try:
+            cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
+            t = cur.fetchone()
+            self.assert_(t[0] is None)
+            self.assertEqual(t[1], {})
+            self.assertEqual(t[2], {'a': 'b'})
+
+        finally:
+            psycopg2.extensions.string_types.pop(oid)
+
+    @skip_if_no_hstore
+    @skip_before_postgres(8, 3)
+    def test_roundtrip_array(self):
+        # Round-trip a list of dicts as a server-side hstore[] value.
+        from psycopg2.extras import register_hstore
+        register_hstore(self.conn)
+
+        ds = []
+        ds.append({})
+        ds.append({'a': 'b', 'c': None})
+
+        ab = map(chr, range(32, 128))
+        ds.append(dict(zip(ab, ab)))
+        ds.append({''.join(ab): ''.join(ab)})
+
+        self.conn.set_client_encoding('latin1')
+        if sys.version_info[0] < 3:
+            ab = map(chr, range(32, 127) + range(160, 255))
+        else:
+            # BUGFIX: range objects don't support `+` on Python 3 (the only
+            # interpreter this branch runs on); build the byte string from
+            # concatenated lists instead.
+            ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
+
+        ds.append({''.join(ab): ''.join(ab)})
+        ds.append(dict(zip(ab, ab)))
+
+        cur = self.conn.cursor()
+        cur.execute("select %s", (ds,))
+        ds1 = cur.fetchone()[0]
+        self.assertEqual(ds, ds1)
+
+    @skip_if_no_hstore
+    @skip_before_postgres(8, 3)
+    def test_array_cast(self):
+        # A server-side hstore[] is cast to a list of dicts.
+        from psycopg2.extras import register_hstore
+        register_hstore(self.conn)
+        cur = self.conn.cursor()
+        cur.execute("select array['a=>1'::hstore, 'b=>2'::hstore];")
+        a = cur.fetchone()[0]
+        self.assertEqual(a, [{'a': '1'}, {'b': '2'}])
+
+    @skip_if_no_hstore
+    def test_array_cast_oid(self):
+        # Registration by explicit oid/array_oid (no connection probing).
+        cur = self.conn.cursor()
+        cur.execute("select 'hstore'::regtype::oid, 'hstore[]'::regtype::oid")
+        oid, aoid = cur.fetchone()
+
+        from psycopg2.extras import register_hstore
+        register_hstore(None, globally=True, oid=oid, array_oid=aoid)
+        try:
+            cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore, '{a=>b}'::hstore[]")
+            t = cur.fetchone()
+            self.assert_(t[0] is None)
+            self.assertEqual(t[1], {})
+            self.assertEqual(t[2], {'a': 'b'})
+            self.assertEqual(t[3], [{'a': 'b'}])
+
+        finally:
+            psycopg2.extensions.string_types.pop(oid)
+            psycopg2.extensions.string_types.pop(aoid)
+
+    @skip_if_no_hstore
+    def test_non_dbapi_connection(self):
+        # register_hstore must work on connection/cursor subclasses whose
+        # cursors don't return regular tuples (e.g. RealDictConnection).
+        from psycopg2.extras import RealDictConnection
+        from psycopg2.extras import register_hstore
+
+        conn = self.connect(connection_factory=RealDictConnection)
+        try:
+            register_hstore(conn)
+            curs = conn.cursor()
+            curs.execute("select ''::hstore as x")
+            self.assertEqual(curs.fetchone()['x'], {})
+        finally:
+            conn.close()
+
+        conn = self.connect(connection_factory=RealDictConnection)
+        try:
+            curs = conn.cursor()
+            register_hstore(curs)
+            curs.execute("select ''::hstore as x")
+            self.assertEqual(curs.fetchone()['x'], {})
+        finally:
+            conn.close()
+
+
+def skip_if_no_composite(f):
+    # Decorator: composite types need PostgreSQL >= 8.0.
+    @wraps(f)
+    def skip_if_no_composite_(self):
+        if self.conn.server_version < 80000:
+            return self.skipTest(
+                "server version %s doesn't support composite types"
+                % self.conn.server_version)
+
+        return f(self)
+
+    return skip_if_no_composite_
+
+class AdaptTypeTestCase(ConnectingTestCase):
+    @skip_if_no_composite
+    def test_none_in_record(self):
+        # None inside a tuple is rendered as NULL in the record literal.
+        curs = self.conn.cursor()
+        s = curs.mogrify("SELECT %s;", [(42, None)])
+        self.assertEqual(b("SELECT (42, NULL);"), s)
+        curs.execute("SELECT %s;", [(42, None)])
+        d = curs.fetchone()[0]
+        self.assertEqual("(42,)", d)
+
+    def test_none_fast_path(self):
+        # the None adapter is not actually invoked in regular adaptation
+        ext = psycopg2.extensions
+
+        class WonkyAdapter(object):
+            def __init__(self, obj): pass
+            def getquoted(self): return "NOPE!"
+ + curs = self.conn.cursor() + + orig_adapter = ext.adapters[type(None), ext.ISQLQuote] + try: + ext.register_adapter(type(None), WonkyAdapter) + self.assertEqual(ext.adapt(None).getquoted(), "NOPE!") + + s = curs.mogrify("SELECT %s;", (None,)) + self.assertEqual(b("SELECT NULL;"), s) + + finally: + ext.register_adapter(type(None), orig_adapter) + + def test_tokenization(self): + from psycopg2.extras import CompositeCaster + def ok(s, v): + self.assertEqual(CompositeCaster.tokenize(s), v) + + ok("(,)", [None, None]) + ok('(,"")', [None, '']) + ok('(hello,,10.234,2010-11-11)', ['hello', None, '10.234', '2010-11-11']) + ok('(10,"""")', ['10', '"']) + ok('(10,",")', ['10', ',']) + ok(r'(10,"\\")', ['10', '\\']) + ok(r'''(10,"\\',""")''', ['10', '''\\',"''']) + ok('(10,"(20,""(30,40)"")")', ['10', '(20,"(30,40)")']) + ok('(10,"(20,""(30,""""(40,50)"""")"")")', ['10', '(20,"(30,""(40,50)"")")']) + ok('(,"(,""(a\nb\tc)"")")', [None, '(,"(a\nb\tc)")']) + ok('(\x01,\x02,\x03,\x04,\x05,\x06,\x07,\x08,"\t","\n","\x0b",' + '"\x0c","\r",\x0e,\x0f,\x10,\x11,\x12,\x13,\x14,\x15,\x16,' + '\x17,\x18,\x19,\x1a,\x1b,\x1c,\x1d,\x1e,\x1f," ",!,"""",#,' + '$,%,&,\',"(",")",*,+,",",-,.,/,0,1,2,3,4,5,6,7,8,9,:,;,<,=,>,?,' + '@,A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,[,"\\\\",],' + '^,_,`,a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,{,|,},' + '~,\x7f)', + map(chr, range(1, 128))) + ok('(,"\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' + '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !' 
+ '""#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]' + '^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f")', + [None, ''.join(map(chr, range(1, 128)))]) + + @skip_if_no_composite + def test_cast_composite(self): + oid = self._create_type("type_isd", + [('anint', 'integer'), ('astring', 'text'), ('adate', 'date')]) + + t = psycopg2.extras.register_composite("type_isd", self.conn) + self.assertEqual(t.name, 'type_isd') + self.assertEqual(t.schema, 'public') + self.assertEqual(t.oid, oid) + self.assert_(issubclass(t.type, tuple)) + self.assertEqual(t.attnames, ['anint', 'astring', 'adate']) + self.assertEqual(t.atttypes, [23,25,1082]) + + curs = self.conn.cursor() + r = (10, 'hello', date(2011,1,2)) + curs.execute("select %s::type_isd;", (r,)) + v = curs.fetchone()[0] + self.assert_(isinstance(v, t.type)) + self.assertEqual(v[0], 10) + self.assertEqual(v[1], "hello") + self.assertEqual(v[2], date(2011,1,2)) + + try: + from collections import namedtuple + except ImportError: + pass + else: + self.assert_(t.type is not tuple) + self.assertEqual(v.anint, 10) + self.assertEqual(v.astring, "hello") + self.assertEqual(v.adate, date(2011,1,2)) + + @skip_if_no_composite + def test_empty_string(self): + # issue #141 + self._create_type("type_ss", [('s1', 'text'), ('s2', 'text')]) + curs = self.conn.cursor() + psycopg2.extras.register_composite("type_ss", curs) + + def ok(t): + curs.execute("select %s::type_ss", (t,)) + rv = curs.fetchone()[0] + self.assertEqual(t, rv) + + ok(('a', 'b')) + ok(('a', '')) + ok(('', 'b')) + ok(('a', None)) + ok((None, 'b')) + ok(('', '')) + ok((None, None)) + + @skip_if_no_composite + def test_cast_nested(self): + self._create_type("type_is", + [("anint", "integer"), ("astring", "text")]) + self._create_type("type_r_dt", + [("adate", "date"), ("apair", "type_is")]) + self._create_type("type_r_ft", + [("afloat", "float8"), ("anotherpair", "type_r_dt")]) + + psycopg2.extras.register_composite("type_is", self.conn) + 
psycopg2.extras.register_composite("type_r_dt", self.conn) + psycopg2.extras.register_composite("type_r_ft", self.conn) + + curs = self.conn.cursor() + r = (0.25, (date(2011,1,2), (42, "hello"))) + curs.execute("select %s::type_r_ft;", (r,)) + v = curs.fetchone()[0] + + self.assertEqual(r, v) + + try: + from collections import namedtuple + except ImportError: + pass + else: + self.assertEqual(v.anotherpair.apair.astring, "hello") + + @skip_if_no_composite + def test_register_on_cursor(self): + self._create_type("type_ii", [("a", "integer"), ("b", "integer")]) + + curs1 = self.conn.cursor() + curs2 = self.conn.cursor() + psycopg2.extras.register_composite("type_ii", curs1) + curs1.execute("select (1,2)::type_ii") + self.assertEqual(curs1.fetchone()[0], (1,2)) + curs2.execute("select (1,2)::type_ii") + self.assertEqual(curs2.fetchone()[0], "(1,2)") + + @skip_if_no_composite + def test_register_on_connection(self): + self._create_type("type_ii", [("a", "integer"), ("b", "integer")]) + + conn1 = self.connect() + conn2 = self.connect() + try: + psycopg2.extras.register_composite("type_ii", conn1) + curs1 = conn1.cursor() + curs2 = conn2.cursor() + curs1.execute("select (1,2)::type_ii") + self.assertEqual(curs1.fetchone()[0], (1,2)) + curs2.execute("select (1,2)::type_ii") + self.assertEqual(curs2.fetchone()[0], "(1,2)") + finally: + conn1.close() + conn2.close() + + @skip_if_no_composite + def test_register_globally(self): + self._create_type("type_ii", [("a", "integer"), ("b", "integer")]) + + conn1 = self.connect() + conn2 = self.connect() + try: + t = psycopg2.extras.register_composite("type_ii", conn1, globally=True) + try: + curs1 = conn1.cursor() + curs2 = conn2.cursor() + curs1.execute("select (1,2)::type_ii") + self.assertEqual(curs1.fetchone()[0], (1,2)) + curs2.execute("select (1,2)::type_ii") + self.assertEqual(curs2.fetchone()[0], (1,2)) + finally: + # drop the registered typecasters to help the refcounting + # script to return precise values. 
+ del psycopg2.extensions.string_types[t.typecaster.values[0]] + if t.array_typecaster: + del psycopg2.extensions.string_types[ + t.array_typecaster.values[0]] + + finally: + conn1.close() + conn2.close() + + @skip_if_no_composite + def test_composite_namespace(self): + curs = self.conn.cursor() + curs.execute(""" + select nspname from pg_namespace + where nspname = 'typens'; + """) + if not curs.fetchone(): + curs.execute("create schema typens;") + self.conn.commit() + + self._create_type("typens.typens_ii", + [("a", "integer"), ("b", "integer")]) + t = psycopg2.extras.register_composite( + "typens.typens_ii", self.conn) + self.assertEqual(t.schema, 'typens') + curs.execute("select (4,8)::typens.typens_ii") + self.assertEqual(curs.fetchone()[0], (4,8)) + + @skip_if_no_composite + @skip_before_postgres(8, 4) + def test_composite_array(self): + oid = self._create_type("type_isd", + [('anint', 'integer'), ('astring', 'text'), ('adate', 'date')]) + + t = psycopg2.extras.register_composite("type_isd", self.conn) + + curs = self.conn.cursor() + r1 = (10, 'hello', date(2011,1,2)) + r2 = (20, 'world', date(2011,1,3)) + curs.execute("select %s::type_isd[];", ([r1, r2],)) + v = curs.fetchone()[0] + self.assertEqual(len(v), 2) + self.assert_(isinstance(v[0], t.type)) + self.assertEqual(v[0][0], 10) + self.assertEqual(v[0][1], "hello") + self.assertEqual(v[0][2], date(2011,1,2)) + self.assert_(isinstance(v[1], t.type)) + self.assertEqual(v[1][0], 20) + self.assertEqual(v[1][1], "world") + self.assertEqual(v[1][2], date(2011,1,3)) + + @skip_if_no_composite + def test_wrong_schema(self): + oid = self._create_type("type_ii", [("a", "integer"), ("b", "integer")]) + from psycopg2.extras import CompositeCaster + c = CompositeCaster('type_ii', oid, [('a', 23), ('b', 23), ('c', 23)]) + curs = self.conn.cursor() + psycopg2.extensions.register_type(c.typecaster, curs) + curs.execute("select (1,2)::type_ii") + self.assertRaises(psycopg2.DataError, curs.fetchone) + + 
@skip_if_no_composite + @skip_before_postgres(8, 4) + def test_from_tables(self): + curs = self.conn.cursor() + curs.execute("""create table ctest1 ( + id integer primary key, + temp int, + label varchar + );""") + + curs.execute("""alter table ctest1 drop temp;""") + + curs.execute("""create table ctest2 ( + id serial primary key, + label varchar, + test_id integer references ctest1(id) + );""") + + curs.execute("""insert into ctest1 (id, label) values + (1, 'test1'), + (2, 'test2');""") + curs.execute("""insert into ctest2 (label, test_id) values + ('testa', 1), + ('testb', 1), + ('testc', 2), + ('testd', 2);""") + + psycopg2.extras.register_composite("ctest1", curs) + psycopg2.extras.register_composite("ctest2", curs) + + curs.execute(""" + select ctest1, array_agg(ctest2) as test2s + from ( + select ctest1, ctest2 + from ctest1 inner join ctest2 on ctest1.id = ctest2.test_id + order by ctest1.id, ctest2.label + ) x group by ctest1;""") + + r = curs.fetchone() + self.assertEqual(r[0], (1, 'test1')) + self.assertEqual(r[1], [(1, 'testa', 1), (2, 'testb', 1)]) + r = curs.fetchone() + self.assertEqual(r[0], (2, 'test2')) + self.assertEqual(r[1], [(3, 'testc', 2), (4, 'testd', 2)]) + + @skip_if_no_composite + def test_non_dbapi_connection(self): + from psycopg2.extras import RealDictConnection + from psycopg2.extras import register_composite + self._create_type("type_ii", [("a", "integer"), ("b", "integer")]) + + conn = self.connect(connection_factory=RealDictConnection) + try: + register_composite('type_ii', conn) + curs = conn.cursor() + curs.execute("select '(1,2)'::type_ii as x") + self.assertEqual(curs.fetchone()['x'], (1,2)) + finally: + conn.close() + + conn = self.connect(connection_factory=RealDictConnection) + try: + curs = conn.cursor() + register_composite('type_ii', conn) + curs.execute("select '(1,2)'::type_ii as x") + self.assertEqual(curs.fetchone()['x'], (1,2)) + finally: + conn.close() + + @skip_if_no_composite + def test_subclass(self): + oid = 
self._create_type("type_isd", + [('anint', 'integer'), ('astring', 'text'), ('adate', 'date')]) + + from psycopg2.extras import register_composite, CompositeCaster + + class DictComposite(CompositeCaster): + def make(self, values): + return dict(zip(self.attnames, values)) + + t = register_composite('type_isd', self.conn, factory=DictComposite) + + self.assertEqual(t.name, 'type_isd') + self.assertEqual(t.oid, oid) + + curs = self.conn.cursor() + r = (10, 'hello', date(2011,1,2)) + curs.execute("select %s::type_isd;", (r,)) + v = curs.fetchone()[0] + self.assert_(isinstance(v, dict)) + self.assertEqual(v['anint'], 10) + self.assertEqual(v['astring'], "hello") + self.assertEqual(v['adate'], date(2011,1,2)) + + def _create_type(self, name, fields): + curs = self.conn.cursor() + try: + curs.execute("drop type %s cascade;" % name) + except psycopg2.ProgrammingError: + self.conn.rollback() + + curs.execute("create type %s as (%s);" % (name, + ", ".join(["%s %s" % p for p in fields]))) + if '.' 
in name: + schema, name = name.split('.') + else: + schema = 'public' + + curs.execute("""\ + SELECT t.oid + FROM pg_type t JOIN pg_namespace ns ON typnamespace = ns.oid + WHERE typname = %s and nspname = %s; + """, (name, schema)) + oid = curs.fetchone()[0] + self.conn.commit() + return oid + + +def skip_if_json_module(f): + """Skip a test if a Python json module *is* available""" + @wraps(f) + def skip_if_json_module_(self): + if psycopg2.extras.json is not None: + return self.skipTest("json module is available") + + return f(self) + + return skip_if_json_module_ + +def skip_if_no_json_module(f): + """Skip a test if no Python json module is available""" + @wraps(f) + def skip_if_no_json_module_(self): + if psycopg2.extras.json is None: + return self.skipTest("json module not available") + + return f(self) + + return skip_if_no_json_module_ + +def skip_if_no_json_type(f): + """Skip a test if PostgreSQL json type is not available""" + @wraps(f) + def skip_if_no_json_type_(self): + curs = self.conn.cursor() + curs.execute("select oid from pg_type where typname = 'json'") + if not curs.fetchone(): + return self.skipTest("json not available in test database") + + return f(self) + + return skip_if_no_json_type_ + +class JsonTestCase(ConnectingTestCase): + @skip_if_json_module + def test_module_not_available(self): + from psycopg2.extras import Json + self.assertRaises(ImportError, Json(None).getquoted) + + @skip_if_json_module + def test_customizable_with_module_not_available(self): + from psycopg2.extras import Json + class MyJson(Json): + def dumps(self, obj): + assert obj is None + return "hi" + + self.assertEqual(MyJson(None).getquoted(), "'hi'") + + @skip_if_no_json_module + def test_adapt(self): + from psycopg2.extras import json, Json + + objs = [None, "te'xt", 123, 123.45, + u'\xe0\u20ac', ['a', 100], {'a': 100} ] + + curs = self.conn.cursor() + for obj in enumerate(objs): + self.assertEqual(curs.mogrify("%s", (Json(obj),)), + 
psycopg2.extensions.QuotedString(json.dumps(obj)).getquoted()) + + @skip_if_no_json_module + def test_adapt_dumps(self): + from psycopg2.extras import json, Json + + class DecimalEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, Decimal): + return float(obj) + return json.JSONEncoder.default(self, obj) + + curs = self.conn.cursor() + obj = Decimal('123.45') + dumps = lambda obj: json.dumps(obj, cls=DecimalEncoder) + self.assertEqual(curs.mogrify("%s", (Json(obj, dumps=dumps),)), + b("'123.45'")) + + @skip_if_no_json_module + def test_adapt_subclass(self): + from psycopg2.extras import json, Json + + class DecimalEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, Decimal): + return float(obj) + return json.JSONEncoder.default(self, obj) + + class MyJson(Json): + def dumps(self, obj): + return json.dumps(obj, cls=DecimalEncoder) + + curs = self.conn.cursor() + obj = Decimal('123.45') + self.assertEqual(curs.mogrify("%s", (MyJson(obj),)), + b("'123.45'")) + + @skip_if_no_json_module + def test_register_on_dict(self): + from psycopg2.extras import Json + psycopg2.extensions.register_adapter(dict, Json) + + try: + curs = self.conn.cursor() + obj = {'a': 123} + self.assertEqual(curs.mogrify("%s", (obj,)), + b("""'{"a": 123}'""")) + finally: + del psycopg2.extensions.adapters[dict, psycopg2.extensions.ISQLQuote] + + + def test_type_not_available(self): + curs = self.conn.cursor() + curs.execute("select oid from pg_type where typname = 'json'") + if curs.fetchone(): + return self.skipTest("json available in test database") + + self.assertRaises(psycopg2.ProgrammingError, + psycopg2.extras.register_json, self.conn) + + @skip_if_no_json_module + @skip_before_postgres(9, 2) + def test_default_cast(self): + curs = self.conn.cursor() + + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None}) + + curs.execute("""select array['{"a": 100.0, "b": null}']::json[]""") + 
self.assertEqual(curs.fetchone()[0], [{'a': 100.0, 'b': None}]) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_register_on_connection(self): + psycopg2.extras.register_json(self.conn) + curs = self.conn.cursor() + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None}) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_register_on_cursor(self): + curs = self.conn.cursor() + psycopg2.extras.register_json(curs) + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None}) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_register_globally(self): + old = psycopg2.extensions.string_types.get(114) + olda = psycopg2.extensions.string_types.get(199) + try: + new, newa = psycopg2.extras.register_json(self.conn, globally=True) + curs = self.conn.cursor() + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None}) + finally: + psycopg2.extensions.string_types.pop(new.values[0]) + psycopg2.extensions.string_types.pop(newa.values[0]) + if old: + psycopg2.extensions.register_type(old) + if olda: + psycopg2.extensions.register_type(olda) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_loads(self): + json = psycopg2.extras.json + loads = lambda x: json.loads(x, parse_float=Decimal) + psycopg2.extras.register_json(self.conn, loads=loads) + curs = self.conn.cursor() + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + data = curs.fetchone()[0] + self.assert_(isinstance(data['a'], Decimal)) + self.assertEqual(data['a'], Decimal('100.0')) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_no_conn_curs(self): + from psycopg2._json import _get_json_oids + oid, array_oid = _get_json_oids(self.conn) + + old = psycopg2.extensions.string_types.get(114) + olda = psycopg2.extensions.string_types.get(199) + loads = lambda x: 
psycopg2.extras.json.loads(x, parse_float=Decimal) + try: + new, newa = psycopg2.extras.register_json( + loads=loads, oid=oid, array_oid=array_oid) + curs = self.conn.cursor() + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + data = curs.fetchone()[0] + self.assert_(isinstance(data['a'], Decimal)) + self.assertEqual(data['a'], Decimal('100.0')) + finally: + psycopg2.extensions.string_types.pop(new.values[0]) + psycopg2.extensions.string_types.pop(newa.values[0]) + if old: + psycopg2.extensions.register_type(old) + if olda: + psycopg2.extensions.register_type(olda) + + @skip_if_no_json_module + @skip_before_postgres(9, 2) + def test_register_default(self): + curs = self.conn.cursor() + + loads = lambda x: psycopg2.extras.json.loads(x, parse_float=Decimal) + psycopg2.extras.register_default_json(curs, loads=loads) + + curs.execute("""select '{"a": 100.0, "b": null}'::json""") + data = curs.fetchone()[0] + self.assert_(isinstance(data['a'], Decimal)) + self.assertEqual(data['a'], Decimal('100.0')) + + curs.execute("""select array['{"a": 100.0, "b": null}']::json[]""") + data = curs.fetchone()[0] + self.assert_(isinstance(data[0]['a'], Decimal)) + self.assertEqual(data[0]['a'], Decimal('100.0')) + + @skip_if_no_json_module + @skip_if_no_json_type + def test_null(self): + psycopg2.extras.register_json(self.conn) + curs = self.conn.cursor() + curs.execute("""select NULL::json""") + self.assertEqual(curs.fetchone()[0], None) + curs.execute("""select NULL::json[]""") + self.assertEqual(curs.fetchone()[0], None) + + @skip_if_no_json_module + def test_no_array_oid(self): + curs = self.conn.cursor() + t1, t2 = psycopg2.extras.register_json(curs, oid=25) + self.assertEqual(t1.values[0], 25) + self.assertEqual(t2, None) + + curs.execute("""select '{"a": 100.0, "b": null}'::text""") + data = curs.fetchone()[0] + self.assertEqual(data['a'], 100) + self.assertEqual(data['b'], None) + + +class RangeTestCase(unittest.TestCase): + def test_noparam(self): + from 
psycopg2.extras import Range + r = Range() + + self.assert_(not r.isempty) + self.assertEqual(r.lower, None) + self.assertEqual(r.upper, None) + self.assert_(r.lower_inf) + self.assert_(r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + def test_empty(self): + from psycopg2.extras import Range + r = Range(empty=True) + + self.assert_(r.isempty) + self.assertEqual(r.lower, None) + self.assertEqual(r.upper, None) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + def test_nobounds(self): + from psycopg2.extras import Range + r = Range(10, 20) + self.assertEqual(r.lower, 10) + self.assertEqual(r.upper, 20) + self.assert_(not r.isempty) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(r.lower_inc) + self.assert_(not r.upper_inc) + + def test_bounds(self): + from psycopg2.extras import Range + for bounds, lower_inc, upper_inc in [ + ('[)', True, False), + ('(]', False, True), + ('()', False, False), + ('[]', True, True),]: + r = Range(10, 20, bounds) + self.assertEqual(r.lower, 10) + self.assertEqual(r.upper, 20) + self.assert_(not r.isempty) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assertEqual(r.lower_inc, lower_inc) + self.assertEqual(r.upper_inc, upper_inc) + + def test_keywords(self): + from psycopg2.extras import Range + r = Range(upper=20) + self.assertEqual(r.lower, None) + self.assertEqual(r.upper, 20) + self.assert_(not r.isempty) + self.assert_(r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + r = Range(lower=10, bounds='(]') + self.assertEqual(r.lower, 10) + self.assertEqual(r.upper, None) + self.assert_(not r.isempty) + self.assert_(not r.lower_inf) + self.assert_(r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + def test_bad_bounds(self): + from psycopg2.extras import Range + 
self.assertRaises(ValueError, Range, bounds='(') + self.assertRaises(ValueError, Range, bounds='[}') + + def test_in(self): + from psycopg2.extras import Range + r = Range(empty=True) + self.assert_(10 not in r) + + r = Range() + self.assert_(10 in r) + + r = Range(lower=10, bounds='[)') + self.assert_(9 not in r) + self.assert_(10 in r) + self.assert_(11 in r) + + r = Range(lower=10, bounds='()') + self.assert_(9 not in r) + self.assert_(10 not in r) + self.assert_(11 in r) + + r = Range(upper=20, bounds='()') + self.assert_(19 in r) + self.assert_(20 not in r) + self.assert_(21 not in r) + + r = Range(upper=20, bounds='(]') + self.assert_(19 in r) + self.assert_(20 in r) + self.assert_(21 not in r) + + r = Range(10, 20) + self.assert_(9 not in r) + self.assert_(10 in r) + self.assert_(11 in r) + self.assert_(19 in r) + self.assert_(20 not in r) + self.assert_(21 not in r) + + r = Range(10, 20, '(]') + self.assert_(9 not in r) + self.assert_(10 not in r) + self.assert_(11 in r) + self.assert_(19 in r) + self.assert_(20 in r) + self.assert_(21 not in r) + + r = Range(20, 10) + self.assert_(9 not in r) + self.assert_(10 not in r) + self.assert_(11 not in r) + self.assert_(19 not in r) + self.assert_(20 not in r) + self.assert_(21 not in r) + + def test_nonzero(self): + from psycopg2.extras import Range + self.assert_(Range()) + self.assert_(Range(10, 20)) + self.assert_(not Range(empty=True)) + + def test_eq_hash(self): + from psycopg2.extras import Range + def assert_equal(r1, r2): + self.assert_(r1 == r2) + self.assert_(hash(r1) == hash(r2)) + + assert_equal(Range(empty=True), Range(empty=True)) + assert_equal(Range(), Range()) + assert_equal(Range(10, None), Range(10, None)) + assert_equal(Range(10, 20), Range(10, 20)) + assert_equal(Range(10, 20), Range(10, 20, '[)')) + assert_equal(Range(10, 20, '[]'), Range(10, 20, '[]')) + + def assert_not_equal(r1, r2): + self.assert_(r1 != r2) + self.assert_(hash(r1) != hash(r2)) + + assert_not_equal(Range(10, 20), 
Range(10, 21)) + assert_not_equal(Range(10, 20), Range(11, 20)) + assert_not_equal(Range(10, 20, '[)'), Range(10, 20, '[]')) + + def test_eq_wrong_type(self): + from psycopg2.extras import Range + self.assertNotEqual(Range(10, 20), ()) + + def test_eq_subclass(self): + from psycopg2.extras import Range, NumericRange + + class IntRange(NumericRange): pass + class PositiveIntRange(IntRange): pass + + self.assertEqual(Range(10, 20), IntRange(10, 20)) + self.assertEqual(PositiveIntRange(10, 20), IntRange(10, 20)) + + def test_not_ordered(self): + from psycopg2.extras import Range + self.assertRaises(TypeError, lambda: Range(empty=True) < Range(0,4)) + self.assertRaises(TypeError, lambda: Range(1,2) > Range(0,4)) + self.assertRaises(TypeError, lambda: Range(1,2) <= Range()) + self.assertRaises(TypeError, lambda: Range(1,2) >= Range()) + + +def skip_if_no_range(f): + @wraps(f) + def skip_if_no_range_(self): + if self.conn.server_version < 90200: + return self.skipTest( + "server version %s doesn't support range types" + % self.conn.server_version) + + return f(self) + + return skip_if_no_range_ + + +class RangeCasterTestCase(ConnectingTestCase): + + builtin_ranges = ('int4range', 'int8range', 'numrange', + 'daterange', 'tsrange', 'tstzrange') + + def test_cast_null(self): + cur = self.conn.cursor() + for type in self.builtin_ranges: + cur.execute("select NULL::%s" % type) + r = cur.fetchone()[0] + self.assertEqual(r, None) + + def test_cast_empty(self): + from psycopg2.extras import Range + cur = self.conn.cursor() + for type in self.builtin_ranges: + cur.execute("select 'empty'::%s" % type) + r = cur.fetchone()[0] + self.assert_(isinstance(r, Range), type) + self.assert_(r.isempty) + + def test_cast_inf(self): + from psycopg2.extras import Range + cur = self.conn.cursor() + for type in self.builtin_ranges: + cur.execute("select '(,)'::%s" % type) + r = cur.fetchone()[0] + self.assert_(isinstance(r, Range), type) + self.assert_(not r.isempty) + self.assert_(r.lower_inf) 
+ self.assert_(r.upper_inf) + + def test_cast_numbers(self): + from psycopg2.extras import NumericRange + cur = self.conn.cursor() + for type in ('int4range', 'int8range'): + cur.execute("select '(10,20)'::%s" % type) + r = cur.fetchone()[0] + self.assert_(isinstance(r, NumericRange)) + self.assert_(not r.isempty) + self.assertEqual(r.lower, 11) + self.assertEqual(r.upper, 20) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(r.lower_inc) + self.assert_(not r.upper_inc) + + cur.execute("select '(10.2,20.6)'::numrange") + r = cur.fetchone()[0] + self.assert_(isinstance(r, NumericRange)) + self.assert_(not r.isempty) + self.assertEqual(r.lower, Decimal('10.2')) + self.assertEqual(r.upper, Decimal('20.6')) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + def test_cast_date(self): + from psycopg2.extras import DateRange + cur = self.conn.cursor() + cur.execute("select '(2000-01-01,2012-12-31)'::daterange") + r = cur.fetchone()[0] + self.assert_(isinstance(r, DateRange)) + self.assert_(not r.isempty) + self.assertEqual(r.lower, date(2000,1,2)) + self.assertEqual(r.upper, date(2012,12,31)) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(r.lower_inc) + self.assert_(not r.upper_inc) + + def test_cast_timestamp(self): + from psycopg2.extras import DateTimeRange + cur = self.conn.cursor() + ts1 = datetime(2000,1,1) + ts2 = datetime(2000,12,31,23,59,59,999) + cur.execute("select tsrange(%s, %s, '()')", (ts1, ts2)) + r = cur.fetchone()[0] + self.assert_(isinstance(r, DateTimeRange)) + self.assert_(not r.isempty) + self.assertEqual(r.lower, ts1) + self.assertEqual(r.upper, ts2) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(not r.lower_inc) + self.assert_(not r.upper_inc) + + def test_cast_timestamptz(self): + from psycopg2.extras import DateTimeTZRange + from psycopg2.tz import FixedOffsetTimezone + cur = 
self.conn.cursor() + ts1 = datetime(2000,1,1, tzinfo=FixedOffsetTimezone(600)) + ts2 = datetime(2000,12,31,23,59,59,999, tzinfo=FixedOffsetTimezone(600)) + cur.execute("select tstzrange(%s, %s, '[]')", (ts1, ts2)) + r = cur.fetchone()[0] + self.assert_(isinstance(r, DateTimeTZRange)) + self.assert_(not r.isempty) + self.assertEqual(r.lower, ts1) + self.assertEqual(r.upper, ts2) + self.assert_(not r.lower_inf) + self.assert_(not r.upper_inf) + self.assert_(r.lower_inc) + self.assert_(r.upper_inc) + + def test_adapt_number_range(self): + from psycopg2.extras import NumericRange + cur = self.conn.cursor() + + r = NumericRange(empty=True) + cur.execute("select %s::int4range", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange)) + self.assert_(r1.isempty) + + r = NumericRange(10, 20) + cur.execute("select %s::int8range", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange)) + self.assertEqual(r1.lower, 10) + self.assertEqual(r1.upper, 20) + self.assert_(r1.lower_inc) + self.assert_(not r1.upper_inc) + + r = NumericRange(Decimal('10.2'), Decimal('20.5'), '(]') + cur.execute("select %s::numrange", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange)) + self.assertEqual(r1.lower, Decimal('10.2')) + self.assertEqual(r1.upper, Decimal('20.5')) + self.assert_(not r1.lower_inc) + self.assert_(r1.upper_inc) + + def test_adapt_numeric_range(self): + from psycopg2.extras import NumericRange + cur = self.conn.cursor() + + r = NumericRange(empty=True) + cur.execute("select %s::int4range", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange), r1) + self.assert_(r1.isempty) + + r = NumericRange(10, 20) + cur.execute("select %s::int8range", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange)) + self.assertEqual(r1.lower, 10) + self.assertEqual(r1.upper, 20) + self.assert_(r1.lower_inc) + self.assert_(not r1.upper_inc) + + r = NumericRange(Decimal('10.2'), Decimal('20.5'), 
'(]') + cur.execute("select %s::numrange", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, NumericRange)) + self.assertEqual(r1.lower, Decimal('10.2')) + self.assertEqual(r1.upper, Decimal('20.5')) + self.assert_(not r1.lower_inc) + self.assert_(r1.upper_inc) + + def test_adapt_date_range(self): + from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange + from psycopg2.tz import FixedOffsetTimezone + cur = self.conn.cursor() + + d1 = date(2012, 01, 01) + d2 = date(2012, 12, 31) + r = DateRange(d1, d2) + cur.execute("select %s", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, DateRange)) + self.assertEqual(r1.lower, d1) + self.assertEqual(r1.upper, d2) + self.assert_(r1.lower_inc) + self.assert_(not r1.upper_inc) + + r = DateTimeRange(empty=True) + cur.execute("select %s", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, DateTimeRange)) + self.assert_(r1.isempty) + + ts1 = datetime(2000,1,1, tzinfo=FixedOffsetTimezone(600)) + ts2 = datetime(2000,12,31,23,59,59,999, tzinfo=FixedOffsetTimezone(600)) + r = DateTimeTZRange(ts1, ts2, '(]') + cur.execute("select %s", (r,)) + r1 = cur.fetchone()[0] + self.assert_(isinstance(r1, DateTimeTZRange)) + self.assertEqual(r1.lower, ts1) + self.assertEqual(r1.upper, ts2) + self.assert_(not r1.lower_inc) + self.assert_(r1.upper_inc) + + def test_register_range_adapter(self): + from psycopg2.extras import Range, register_range + cur = self.conn.cursor() + cur.execute("create type textrange as range (subtype=text)") + rc = register_range('textrange', 'TextRange', cur) + + TextRange = rc.range + self.assert_(issubclass(TextRange, Range)) + self.assertEqual(TextRange.__name__, 'TextRange') + + r = TextRange('a', 'b', '(]') + cur.execute("select %s", (r,)) + r1 = cur.fetchone()[0] + self.assertEqual(r1.lower, 'a') + self.assertEqual(r1.upper, 'b') + self.assert_(not r1.lower_inc) + self.assert_(r1.upper_inc) + + cur.execute("select %s", ([r,r,r],)) + rs = cur.fetchone()[0] + 
self.assertEqual(len(rs), 3) + for r1 in rs: + self.assertEqual(r1.lower, 'a') + self.assertEqual(r1.upper, 'b') + self.assert_(not r1.lower_inc) + self.assert_(r1.upper_inc) + + def test_range_escaping(self): + from psycopg2.extras import register_range + cur = self.conn.cursor() + cur.execute("create type textrange as range (subtype=text)") + rc = register_range('textrange', 'TextRange', cur) + + TextRange = rc.range + cur.execute(""" + create table rangetest ( + id integer primary key, + range textrange)""") + + bounds = [ '[)', '(]', '()', '[]' ] + ranges = [ TextRange(low, up, bounds[i % 4]) + for i, (low, up) in enumerate(zip( + [None] + map(chr, range(1, 128)), + map(chr, range(1,128)) + [None], + ))] + ranges.append(TextRange()) + ranges.append(TextRange(empty=True)) + + errs = 0 + for i, r in enumerate(ranges): + # not all the ranges make sense: + # fun fact: select ascii('#') < ascii('$'), '#' < '$' + # yelds... t, f! At least in en_GB.UTF-8 collation. + # which seems suggesting a supremacy of the pound on the dollar. + # So some of these ranges will fail to insert. Be prepared but... + try: + cur.execute(""" + savepoint x; + insert into rangetest (id, range) values (%s, %s); + """, (i, r)) + except psycopg2.DataError: + errs += 1 + cur.execute("rollback to savepoint x;") + + # ...not too many errors! in the above collate there are 17 errors: + # assume in other collates we won't find more than 30 + self.assert_(errs < 30, + "too many collate errors. 
Is the test working?") + + cur.execute("select id, range from rangetest order by id") + for i, r in cur: + self.assertEqual(ranges[i].lower, r.lower) + self.assertEqual(ranges[i].upper, r.upper) + self.assertEqual(ranges[i].lower_inc, r.lower_inc) + self.assertEqual(ranges[i].upper_inc, r.upper_inc) + self.assertEqual(ranges[i].lower_inf, r.lower_inf) + self.assertEqual(ranges[i].upper_inf, r.upper_inf) + + def test_range_not_found(self): + from psycopg2.extras import register_range + cur = self.conn.cursor() + self.assertRaises(psycopg2.ProgrammingError, + register_range, 'nosuchrange', 'FailRange', cur) + + def test_schema_range(self): + cur = self.conn.cursor() + cur.execute("create schema rs") + cur.execute("create type r1 as range (subtype=text)") + cur.execute("create type r2 as range (subtype=text)") + cur.execute("create type rs.r2 as range (subtype=text)") + cur.execute("create type rs.r3 as range (subtype=text)") + cur.execute("savepoint x") + + from psycopg2.extras import register_range + ra1 = register_range('r1', 'r1', cur) + ra2 = register_range('r2', 'r2', cur) + rars2 = register_range('rs.r2', 'r2', cur) + rars3 = register_range('rs.r3', 'r3', cur) + + self.assertNotEqual( + ra2.typecaster.values[0], + rars2.typecaster.values[0]) + + self.assertRaises(psycopg2.ProgrammingError, + register_range, 'r3', 'FailRange', cur) + cur.execute("rollback to savepoint x;") + + self.assertRaises(psycopg2.ProgrammingError, + register_range, 'rs.r1', 'FailRange', cur) + cur.execute("rollback to savepoint x;") + +decorate_all_tests(RangeCasterTestCase, skip_if_no_range) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() + diff --git a/psycopg2/tests/test_with.py b/psycopg2/tests/test_with.py new file mode 100644 index 0000000..d39016c --- /dev/null +++ b/psycopg2/tests/test_with.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python + +# test_ctxman.py - unit test for connection and cursor used as 
context manager +# +# Copyright (C) 2012 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + + +from __future__ import with_statement + +import psycopg2 +import psycopg2.extensions as ext + +from testutils import unittest, ConnectingTestCase + +class WithTestCase(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + curs = self.conn.cursor() + try: + curs.execute("delete from test_with") + self.conn.commit() + except psycopg2.ProgrammingError: + # assume table doesn't exist + self.conn.rollback() + curs.execute("create table test_with (id integer primary key)") + self.conn.commit() + + +class WithConnectionTestCase(WithTestCase): + def test_with_ok(self): + with self.conn as conn: + self.assert_(self.conn is conn) + self.assertEqual(conn.status, ext.STATUS_READY) + curs = conn.cursor() + curs.execute("insert into test_with values (1)") + self.assertEqual(conn.status, ext.STATUS_BEGIN) + + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), [(1,)]) + + 
def test_with_connect_idiom(self): + with self.connect() as conn: + self.assertEqual(conn.status, ext.STATUS_READY) + curs = conn.cursor() + curs.execute("insert into test_with values (2)") + self.assertEqual(conn.status, ext.STATUS_BEGIN) + + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), [(2,)]) + + def test_with_error_db(self): + def f(): + with self.conn as conn: + curs = conn.cursor() + curs.execute("insert into test_with values ('a')") + + self.assertRaises(psycopg2.DataError, f) + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), []) + + def test_with_error_python(self): + def f(): + with self.conn as conn: + curs = conn.cursor() + curs.execute("insert into test_with values (3)") + 1/0 + + self.assertRaises(ZeroDivisionError, f) + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), []) + + def test_with_closed(self): + def f(): + with self.conn: + pass + + self.conn.close() + self.assertRaises(psycopg2.InterfaceError, f) + + def test_subclass_commit(self): + commits = [] + class MyConn(ext.connection): + def commit(self): + commits.append(None) + super(MyConn, self).commit() + + with self.connect(connection_factory=MyConn) as conn: + curs = conn.cursor() + curs.execute("insert into test_with values (10)") + + self.assertEqual(conn.status, ext.STATUS_READY) + self.assert_(commits) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), [(10,)]) + + def test_subclass_rollback(self): + rollbacks = [] + class MyConn(ext.connection): + def rollback(self): + 
rollbacks.append(None) + super(MyConn, self).rollback() + + try: + with self.connect(connection_factory=MyConn) as conn: + curs = conn.cursor() + curs.execute("insert into test_with values (11)") + 1/0 + except ZeroDivisionError: + pass + else: + self.fail("exception not raised") + + self.assertEqual(conn.status, ext.STATUS_READY) + self.assert_(rollbacks) + + curs = conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), []) + + +class WithCursorTestCase(WithTestCase): + def test_with_ok(self): + with self.conn as conn: + with conn.cursor() as curs: + curs.execute("insert into test_with values (4)") + self.assert_(not curs.closed) + self.assertEqual(self.conn.status, ext.STATUS_BEGIN) + self.assert_(curs.closed) + + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), [(4,)]) + + def test_with_error(self): + try: + with self.conn as conn: + with conn.cursor() as curs: + curs.execute("insert into test_with values (5)") + 1/0 + except ZeroDivisionError: + pass + + self.assertEqual(self.conn.status, ext.STATUS_READY) + self.assert_(not self.conn.closed) + self.assert_(curs.closed) + + curs = self.conn.cursor() + curs.execute("select * from test_with") + self.assertEqual(curs.fetchall(), []) + + def test_subclass(self): + closes = [] + class MyCurs(ext.cursor): + def close(self): + closes.append(None) + super(MyCurs, self).close() + + with self.conn.cursor(cursor_factory=MyCurs) as curs: + self.assert_(isinstance(curs, MyCurs)) + + self.assert_(curs.closed) + self.assert_(closes) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/psycopg2/tests/testconfig.py b/psycopg2/tests/testconfig.py new file mode 100644 index 0000000..f83ded8 --- /dev/null +++ b/psycopg2/tests/testconfig.py @@ -0,0 +1,36 @@ 
+# Configure the test suite from the env variables. + +import os + +dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test') +dbhost = os.environ.get('PSYCOPG2_TESTDB_HOST', None) +dbport = os.environ.get('PSYCOPG2_TESTDB_PORT', None) +dbuser = os.environ.get('PSYCOPG2_TESTDB_USER', None) +dbpass = os.environ.get('PSYCOPG2_TESTDB_PASSWORD', None) + +# Check if we want to test psycopg's green path. +green = os.environ.get('PSYCOPG2_TEST_GREEN', None) +if green: + if green == '1': + from psycopg2.extras import wait_select as wait_callback + elif green == 'eventlet': + from eventlet.support.psycopg2_patcher import eventlet_wait_callback \ + as wait_callback + else: + raise ValueError("please set 'PSYCOPG2_TEST_GREEN' to a valid value") + + import psycopg2.extensions + psycopg2.extensions.set_wait_callback(wait_callback) + +# Construct a DSN to connect to the test database: +dsn = 'dbname=%s' % dbname +if dbhost is not None: + dsn += ' host=%s' % dbhost +if dbport is not None: + dsn += ' port=%s' % dbport +if dbuser is not None: + dsn += ' user=%s' % dbuser +if dbpass is not None: + dsn += ' password=%s' % dbpass + + diff --git a/psycopg2/tests/testutils.py b/psycopg2/tests/testutils.py new file mode 100644 index 0000000..708dd22 --- /dev/null +++ b/psycopg2/tests/testutils.py @@ -0,0 +1,331 @@ +# testutils.py - utility module for psycopg2 testing. + +# +# Copyright (C) 2010-2011 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. 
+# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + + +# Use unittest2 if available. Otherwise mock a skip facility with warnings. + +import os +import sys +from functools import wraps +from testconfig import dsn + +try: + import unittest2 + unittest = unittest2 +except ImportError: + import unittest + unittest2 = None + +if hasattr(unittest, 'skipIf'): + skip = unittest.skip + skipIf = unittest.skipIf + +else: + import warnings + + def skipIf(cond, msg): + def skipIf_(f): + @wraps(f) + def skipIf__(self): + if cond: + warnings.warn(msg) + return + else: + return f(self) + return skipIf__ + return skipIf_ + + def skip(msg): + return skipIf(True, msg) + + def skipTest(self, msg): + warnings.warn(msg) + return + + unittest.TestCase.skipTest = skipTest + +# Silence warnings caused by the stubborness of the Python unittest maintainers +# http://bugs.python.org/issue9424 +if not hasattr(unittest.TestCase, 'assert_') \ +or unittest.TestCase.assert_ is not unittest.TestCase.assertTrue: + # mavaff... + unittest.TestCase.assert_ = unittest.TestCase.assertTrue + unittest.TestCase.failUnless = unittest.TestCase.assertTrue + unittest.TestCase.assertEquals = unittest.TestCase.assertEqual + unittest.TestCase.failUnlessEqual = unittest.TestCase.assertEqual + + +class ConnectingTestCase(unittest.TestCase): + """A test case providing connections for tests. + + A connection for the test is always available as `self.conn`. Others can be + created with `self.connect()`. All are closed on tearDown. + + Subclasses needing to customize setUp and tearDown should remember to call + the base class implementations. 
+ """ + def setUp(self): + self._conns = [] + + def tearDown(self): + # close the connections used in the test + for conn in self._conns: + if not conn.closed: + conn.close() + + def connect(self, **kwargs): + try: + self._conns + except AttributeError, e: + raise AttributeError( + "%s (did you remember calling ConnectingTestCase.setUp()?)" + % e) + + import psycopg2 + conn = psycopg2.connect(dsn, **kwargs) + self._conns.append(conn) + return conn + + def _get_conn(self): + if not hasattr(self, '_the_conn'): + self._the_conn = self.connect() + + return self._the_conn + + def _set_conn(self, conn): + self._the_conn = conn + + conn = property(_get_conn, _set_conn) + + +def decorate_all_tests(cls, *decorators): + """ + Apply all the *decorators* to all the tests defined in the TestCase *cls*. + """ + for n in dir(cls): + if n.startswith('test'): + for d in decorators: + setattr(cls, n, d(getattr(cls, n))) + + +def skip_if_no_uuid(f): + """Decorator to skip a test if uuid is not supported by Py/PG.""" + @wraps(f) + def skip_if_no_uuid_(self): + try: + import uuid + except ImportError: + return self.skipTest("uuid not available in this Python version") + + try: + cur = self.conn.cursor() + cur.execute("select typname from pg_type where typname = 'uuid'") + has = cur.fetchone() + finally: + self.conn.rollback() + + if has: + return f(self) + else: + return self.skipTest("uuid type not available on the server") + + return skip_if_no_uuid_ + + +def skip_if_tpc_disabled(f): + """Skip a test if the server has tpc support disabled.""" + @wraps(f) + def skip_if_tpc_disabled_(self): + from psycopg2 import ProgrammingError + cnn = self.connect() + cur = cnn.cursor() + try: + cur.execute("SHOW max_prepared_transactions;") + except ProgrammingError: + return self.skipTest( + "server too old: two phase transactions not supported.") + else: + mtp = int(cur.fetchone()[0]) + cnn.close() + + if not mtp: + return self.skipTest( + "server not configured for two phase transactions. 
" + "set max_prepared_transactions to > 0 to run the test") + return f(self) + + return skip_if_tpc_disabled_ + + +def skip_if_no_namedtuple(f): + @wraps(f) + def skip_if_no_namedtuple_(self): + try: + from collections import namedtuple + except ImportError: + return self.skipTest("collections.namedtuple not available") + else: + return f(self) + + return skip_if_no_namedtuple_ + + +def skip_if_no_iobase(f): + """Skip a test if io.TextIOBase is not available.""" + @wraps(f) + def skip_if_no_iobase_(self): + try: + from io import TextIOBase + except ImportError: + return self.skipTest("io.TextIOBase not found.") + else: + return f(self) + + return skip_if_no_iobase_ + + +def skip_before_postgres(*ver): + """Skip a test on PostgreSQL before a certain version.""" + ver = ver + (0,) * (3 - len(ver)) + def skip_before_postgres_(f): + @wraps(f) + def skip_before_postgres__(self): + if self.conn.server_version < int("%d%02d%02d" % ver): + return self.skipTest("skipped because PostgreSQL %s" + % self.conn.server_version) + else: + return f(self) + + return skip_before_postgres__ + return skip_before_postgres_ + +def skip_after_postgres(*ver): + """Skip a test on PostgreSQL after (including) a certain version.""" + ver = ver + (0,) * (3 - len(ver)) + def skip_after_postgres_(f): + @wraps(f) + def skip_after_postgres__(self): + if self.conn.server_version >= int("%d%02d%02d" % ver): + return self.skipTest("skipped because PostgreSQL %s" + % self.conn.server_version) + else: + return f(self) + + return skip_after_postgres__ + return skip_after_postgres_ + +def skip_before_python(*ver): + """Skip a test on Python before a certain version.""" + def skip_before_python_(f): + @wraps(f) + def skip_before_python__(self): + if sys.version_info[:len(ver)] < ver: + return self.skipTest("skipped because Python %s" + % ".".join(map(str, sys.version_info[:len(ver)]))) + else: + return f(self) + + return skip_before_python__ + return skip_before_python_ + +def skip_from_python(*ver): + 
"""Skip a test on Python after (including) a certain version.""" + def skip_from_python_(f): + @wraps(f) + def skip_from_python__(self): + if sys.version_info[:len(ver)] >= ver: + return self.skipTest("skipped because Python %s" + % ".".join(map(str, sys.version_info[:len(ver)]))) + else: + return f(self) + + return skip_from_python__ + return skip_from_python_ + +def skip_if_no_superuser(f): + """Skip a test if the database user running the test is not a superuser""" + @wraps(f) + def skip_if_no_superuser_(self): + from psycopg2 import ProgrammingError + try: + return f(self) + except ProgrammingError, e: + import psycopg2.errorcodes + if e.pgcode == psycopg2.errorcodes.INSUFFICIENT_PRIVILEGE: + self.skipTest("skipped because not superuser") + else: + raise + + return skip_if_no_superuser_ + +def skip_if_green(reason): + def skip_if_green_(f): + @wraps(f) + def skip_if_green__(self): + from testconfig import green + if green: + return self.skipTest(reason) + else: + return f(self) + + return skip_if_green__ + return skip_if_green_ + +skip_copy_if_green = skip_if_green("copy in async mode currently not supported") + +def skip_if_no_getrefcount(f): + @wraps(f) + def skip_if_no_getrefcount_(self): + if not hasattr(sys, 'getrefcount'): + return self.skipTest('skipped, no sys.getrefcount()') + else: + return f(self) + return skip_if_no_getrefcount_ + +def script_to_py3(script): + """Convert a script to Python3 syntax if required.""" + if sys.version_info[0] < 3: + return script + + import tempfile + f = tempfile.NamedTemporaryFile(suffix=".py", delete=False) + f.write(script.encode()) + f.flush() + filename = f.name + f.close() + + # 2to3 is way too chatty + import logging + logging.basicConfig(filename=os.devnull) + + from lib2to3.main import main + if main("lib2to3.fixes", ['--no-diffs', '-w', '-n', filename]): + raise Exception('py3 conversion failed') + + f2 = open(filename) + try: + return f2.read() + finally: + f2.close() + os.remove(filename) + diff --git 
a/psycopg2/tz.py b/psycopg2/tz.py new file mode 100644 index 0000000..695a925 --- /dev/null +++ b/psycopg2/tz.py @@ -0,0 +1,135 @@ +"""tzinfo implementations for psycopg2 + +This module holds two different tzinfo implementations that can be used as +the 'tzinfo' argument to datetime constructors, directly passed to psycopg +functions or used to set the .tzinfo_factory attribute in cursors. +""" +# psycopg/tz.py - tzinfo implementation +# +# Copyright (C) 2003-2010 Federico Di Gregorio +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +import datetime +import time + +ZERO = datetime.timedelta(0) + +class FixedOffsetTimezone(datetime.tzinfo): + """Fixed offset in minutes east from UTC. + + This is exactly the implementation__ found in Python 2.3.x documentation, + with a small change to the `!__init__()` method to allow for pickling + and a default name in the form ``sHH:MM`` (``s`` is the sign.). + + The implementation also caches instances. During creation, if a + FixedOffsetTimezone instance has previously been created with the same + offset and name that instance will be returned. This saves memory and + improves comparability. 
+ + .. __: http://docs.python.org/library/datetime.html#datetime-tzinfo + """ + _name = None + _offset = ZERO + + _cache = {} + + def __init__(self, offset=None, name=None): + if offset is not None: + self._offset = datetime.timedelta(minutes = offset) + if name is not None: + self._name = name + + def __new__(cls, offset=None, name=None): + """Return a suitable instance created earlier if it exists + """ + key = (offset, name) + try: + return cls._cache[key] + except KeyError: + tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name) + cls._cache[key] = tz + return tz + + def __repr__(self): + offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60 + return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \ + % (offset_mins, self._name) + + def __getinitargs__(self): + offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60 + return (offset_mins, self._name) + + def utcoffset(self, dt): + return self._offset + + def tzname(self, dt): + if self._name is not None: + return self._name + else: + seconds = self._offset.seconds + self._offset.days * 86400 + hours, seconds = divmod(seconds, 3600) + minutes = seconds/60 + if minutes: + return "%+03d:%02d" % (hours, minutes) + else: + return "%+03d" % hours + + def dst(self, dt): + return ZERO + + +STDOFFSET = datetime.timedelta(seconds = -time.timezone) +if time.daylight: + DSTOFFSET = datetime.timedelta(seconds = -time.altzone) +else: + DSTOFFSET = STDOFFSET +DSTDIFF = DSTOFFSET - STDOFFSET + +class LocalTimezone(datetime.tzinfo): + """Platform idea of local timezone. + + This is the exact implementation from the Python 2.3 documentation. 
+ """ + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = time.mktime(tt) + tt = time.localtime(stamp) + return tt.tm_isdst > 0 + +LOCAL = LocalTimezone() + +# TODO: pre-generate some interesting time zones? diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..c205a7c --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +from distutils.core import setup + +setup( + name='psycopg2', + version='2.5.2', + summary='Python-PostgreSQL Database Adapter', + author='Federico Di Gregorio', + author_email='fog@initd.org', + description='Agnostic and easy to use ajax library for django', + url='http://initd.org/psycopg', + license='GPL with exceptions or ZPL', + package_data={'dajaxice': ['templates/dajaxice/*']}, + long_description=("psycopg2 is a PostgreSQL database adapter for the Python programming " + "language. psycopg2 was written with the aim of being very small and fast, " + "and stable as a rock. " + "" + "psycopg2 is different from the other database adapter because it was " + "designed for heavily multi-threaded applications that create and destroy " + "lots of cursors and make a conspicuous number of concurrent INSERTs or " + "UPDATEs. 
psycopg2 also provide full asynchronous operations and support " + "for coroutine libraries"), + classifiers=['Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', + 'License :: OSI Approved :: Zope Public License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.1', + 'Programming Language :: Python :: 3.2', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: C', + 'Programming Language :: SQL', + 'Topic :: Database', + 'Topic :: Database :: Front-Ends', + 'Topic :: Software Development', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Operating System :: Microsoft :: Windows', + 'Operating System :: Unix'] +) diff --git a/src/psycopg2-2.5.2-py2.7.egg-info b/src/psycopg2-2.5.2-py2.7.egg-info new file mode 100644 index 0000000..d731d13 --- /dev/null +++ b/src/psycopg2-2.5.2-py2.7.egg-info @@ -0,0 +1,40 @@ +Metadata-Version: 1.1 +Name: psycopg2 +Version: 2.5.2 +Summary: Python-PostgreSQL Database Adapter +Home-page: http://initd.org/psycopg/ +Author: Federico Di Gregorio +Author-email: fog@initd.org +License: GPL with exceptions or ZPL +Download-URL: http://initd.org/psycopg/tarballs/PSYCOPG-2-5/psycopg2-2.5.2.tar.gz +Description: psycopg2 is a PostgreSQL database adapter for the Python programming + language. psycopg2 was written with the aim of being very small and fast, + and stable as a rock. + + psycopg2 is different from the other database adapter because it was + designed for heavily multi-threaded applications that create and destroy + lots of cursors and make a conspicuous number of concurrent INSERTs or + UPDATEs. psycopg2 also provide full asynchronous operations and support + for coroutine libraries. 
+ +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: C +Classifier: Programming Language :: SQL +Classifier: Topic :: Database +Classifier: Topic :: Database :: Front-Ends +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix