Invalidate PS cache on a DDL statement
This is because a change to the DB structure might invalidate prepared
statements.
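
A minimal sketch of the failure mode this change guards against, assuming a reachable PostgreSQL server and pg8000 as of this commit's parent (the connection parameters are placeholders, not from the repository):

import pg8000

conn = pg8000.connect(user="postgres", password="secret", database="test")
cursor = conn.cursor()
cursor.execute("create temporary table t1 (f1 int primary key, f3 int)")

# First run: pg8000 prepares the statement and caches it per connection.
cursor.execute("select * from t1")

# DDL changes the table's shape, so the cached prepared statement now
# describes a row type that no longer exists.
cursor.execute("alter table t1 drop column f3")

# Without this change, re-using the stale prepared statement here can be
# rejected by the server; with it, the cache is cleared when the ALTER
# completes and the query is simply prepared again.
cursor.execute("select * from t1")

conn.rollback()
conn.close()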
tlocke committed Jun 8, 2014
1 parent 18a0965 commit 0ddbef5
Showing 5 changed files with 48 additions and 62 deletions.
62 changes: 21 additions & 41 deletions pg8000/core.py
@@ -35,7 +35,7 @@
 from pg8000.errors import (
     NotSupportedError, ProgrammingError, InternalError, IntegrityError,
     OperationalError, DatabaseError, InterfaceError, Error,
-    CopyQueryOrTableRequiredError, CursorClosedError, QueryParameterParseError,
+    CopyQueryOrTableRequiredError, QueryParameterParseError,
     ArrayContentNotHomogenousError, ArrayContentEmptyError,
     ArrayDimensionsNotConsistentError, ArrayContentNotSupportedError, Warning,
     CopyQueryWithoutStreamError)
@@ -85,6 +85,9 @@ def dst(self, dt):
 FC_TEXT = 0
 FC_BINARY = 1

+BINARY_SPACE = b(" ")
+DDL_COMMANDS = b("ALTER"), b("CREATE")
+

 def convert_paramstyle(style, query):
     # I don't see any way to avoid scanning the query string char by char,
@@ -251,14 +254,6 @@ def make_args(vals):
     return ''.join(output_query), make_args


-def require_open_cursor(fn):
-    def _fn(self, *args, **kwargs):
-        if self._c is None:
-            raise CursorClosedError()
-        return fn(self, *args, **kwargs)
-    return _fn
-
-
 EPOCH = datetime.datetime(2000, 1, 1)
 EPOCH_TZ = EPOCH.replace(tzinfo=utc)
 EPOCH_SECONDS = timegm(EPOCH.timetuple())
@@ -598,42 +593,23 @@ def fetchone(self):
     # Stability: Part of the DBAPI 2.0 specification.
     # @param size The number of rows to fetch when called. If not provided,
     # the arraysize property value is used instead.
-    if IS_JYTHON:
-        def fetchmany(self, num=None):
-            if self._stmt is None:
-                raise ProgrammingError("attempting to use unexecuted cursor")
-            else:
-                try:
-                    return tuple(
-                        islice(self, self.arraysize if num is None else num))
-                except TypeError:
-                    raise ProgrammingError(
-                        "attempting to use unexecuted cursor")
-    else:
-        def fetchmany(self, num=None):
-            try:
-                return tuple(
-                    islice(self, self.arraysize if num is None else num))
-            except TypeError:
-                raise ProgrammingError("attempting to use unexecuted cursor")
+    def fetchmany(self, num=None):
+        try:
+            return tuple(
+                islice(self, self.arraysize if num is None else num))
+        except TypeError:
+            raise ProgrammingError("attempting to use unexecuted cursor")

     ##
     # Fetch all remaining rows of a query result, returning them as a sequence
     # of sequences.
     # <p>
     # Stability: Part of the DBAPI 2.0 specification.
-    if IS_JYTHON:
-        def fetchall(self):
-            if self._stmt is None:
-                raise ProgrammingError("attempting to use unexecuted cursor")
-            else:
-                return tuple(self)
-    else:
-        def fetchall(self):
-            try:
-                return tuple(self)
-            except TypeError:
-                raise ProgrammingError("attempting to use unexecuted cursor")
+    def fetchall(self):
+        try:
+            return tuple(self)

nad2000 commented on Aug 6, 2015:

Shouldn't fetchall return a mutable sequence, e.g. a list of tuples, instead of a tuple of lists? The DBAPI 2.0 spec isn't strict on that:

Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples).

However, the majority of providers return a list of tuples, e.g. sqlite, psycopg2, ...

tlocke (author) commented on Aug 6, 2015:

We could make fetchall return a list, but I think a user of the library should expect a sequence type and not make any further assumptions. The reason we use a tuple is that tuple creation is much faster than list creation:

http://stackoverflow.com/questions/68630/are-tuples-more-efficient-than-lists-in-python
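
A quick way to spot-check that claim with timeit (results vary by interpreter and machine; this is illustrative only, not from the discussion):

import timeit

# Build the same small row as a tuple and as a list and compare timings.
setup = "row = list(range(10))"
print(timeit.timeit("tuple(row)", setup=setup))
print(timeit.timeit("list(row)", setup=setup))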

+        except TypeError:
+            raise ProgrammingError("attempting to use unexecuted cursor")

     ##
     # Close the cursor.
@@ -1607,13 +1583,17 @@ def handle_NO_DATA(self, msg, ps):
         pass

     def handle_COMMAND_COMPLETE(self, data, cursor):
-        values = data[:-1].split(b(" "))
-        if values[0] in self._commands_with_count:
+        values = data[:-1].split(BINARY_SPACE)
+        command = values[0]
+        if command in self._commands_with_count:
             row_count = int(values[-1])
             if cursor._row_count == -1:
                 cursor._row_count = row_count
             else:
                 cursor._row_count += row_count
+        if command in DDL_COMMANDS:
+            for k in self._caches:
+                self._caches[k]['ps'].clear()

     def handle_DATA_ROW(self, data, cursor):
         data_idx = 2
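The cache layout the new handle_COMMAND_COMPLETE code iterates over can be modelled roughly like this (a standalone sketch; the real pg8000 cache keys and contents differ, this only illustrates the clearing behaviour):

DDL_COMMANDS = (b"ALTER", b"CREATE")

# Stand-in for Connection._caches: each entry holds a 'ps' dict of cached
# prepared statements.
caches = {
    "text": {"ps": {"select * from t1": "<prepared statement>"}},
    "binary": {"ps": {}},
}

def handle_command_complete(data):
    # A CommandComplete payload is a NUL-terminated tag such as
    # b"ALTER TABLE\x00"; the first word identifies the command.
    values = data[:-1].split(b" ")
    command = values[0]
    if command in DDL_COMMANDS:
        # The table structure may have changed, so every cached prepared
        # statement is dropped and will be re-prepared on next use.
        for k in caches:
            caches[k]["ps"].clear()

handle_command_complete(b"ALTER TABLE\x00")
assert all(len(cache["ps"]) == 0 for cache in caches.values())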
10 changes: 0 additions & 10 deletions pg8000/errors.py
@@ -42,16 +42,6 @@ class InterfaceError(Error):
     pass


-class ConnectionClosedError(InterfaceError):
-    def __init__(self):
-        InterfaceError.__init__(self, "connection is closed")
-
-
-class CursorClosedError(InterfaceError):
-    def __init__(self):
-        InterfaceError.__init__(self, "cursor is closed")
-
-
 class DatabaseError(Error):
     pass

27 changes: 27 additions & 0 deletions pg8000/tests/test_query.py
@@ -118,6 +118,33 @@ def testQuerySizeCache(self):
             cursor.close()
             self.db.rollback()

+    # Run a query on a table, alter the structure of the table, then run the
+    # original query again.
+
+    def testAlter(self):
+        try:
+            cursor = self.db.cursor()
+            cursor.execute("select * from t1")
+            cursor.execute("alter table t1 drop column f3")
+            cursor.execute("select * from t1")
+        finally:
+            cursor.close()
+            self.db.rollback()
+
+    # Run a query on a table, drop then re-create the table, then run the
+    # original query again.
+
+    def testCreate(self):
+        try:
+            cursor = self.db.cursor()
+            cursor.execute("select * from t1")
+            cursor.execute("drop table t1")
+            cursor.execute("create temporary table t1 (f1 int primary key)")
+            cursor.execute("select * from t1")
+        finally:
+            cursor.close()
+            self.db.rollback()
+
     def testInsertReturning(self):
         try:
             cursor = self.db.cursor()
2 changes: 0 additions & 2 deletions run_25
@@ -1,4 +1,2 @@
 jython run_25.py
-jython run_25_cache.py
 python2.5 run_25.py
-python2.5 run_25_cache.py
9 changes: 0 additions & 9 deletions run_25_cache.py

This file was deleted.
