Skip to content

Commit

Permalink
Store values for callbacks on insert/update/delete operations (#431)
Browse files Browse the repository at this point in the history
* Changed listify to store computations in values

* Moved attempt_upload logic to callbacks

* Fixed callbacks access to fields
  • Loading branch information
gi0baro authored and mdipierro committed Dec 20, 2016
1 parent 5cd99f5 commit e49dd9b
Show file tree
Hide file tree
Showing 5 changed files with 309 additions and 199 deletions.
90 changes: 87 additions & 3 deletions pydal/helpers/classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,26 @@
import time
import traceback

from .._compat import PY2, exists, copyreg, integer_types, implements_bool, \
iterkeys, itervalues, iteritems
from .._compat import (
PY2, exists, copyreg, implements_bool, iterkeys, itervalues, iteritems,
long
)
from .._globals import THREAD_LOCAL
from .serializers import serializers


long = integer_types[-1]
class cachedprop(object):
    #: Descriptor for a read-only @property whose getter runs at most once.
    #
    # On first access the computed value is written into the instance's
    # __dict__ under the property's name; since this is a non-data
    # descriptor, the cached instance attribute shadows it on every
    # subsequent lookup and the getter is never called again.

    def __init__(self, fget, doc=None):
        self.fget = fget
        self.__name__ = fget.__name__
        # Fall back to the getter's docstring when no explicit doc given.
        self.__doc__ = doc if doc else fget.__doc__

    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        value = self.fget(obj)
        # Cache on the instance so this descriptor is bypassed next time.
        obj.__dict__[self.__name__] = value
        return value


@implements_bool
Expand Down Expand Up @@ -78,6 +91,77 @@ def pickle_basicstorage(s):
copyreg.pickle(BasicStorage, pickle_basicstorage)


class OpRow(object):
    """Row-like container passed to insert/update/delete callbacks.

    Stores the raw operation values alongside the Field objects they
    belong to, so callbacks can inspect both. Item and attribute access
    are proxied to the stored values; ``op_values()`` yields the
    (Field, value) pairs the adapter will actually operate on.
    """

    __slots__ = ('_table', '_fields', '_values')

    def __init__(self, table):
        # Use object.__setattr__ to bypass our own __setattr__, which
        # would otherwise treat these internals as row values.
        object.__setattr__(self, '_table', table)
        object.__setattr__(self, '_fields', {})
        object.__setattr__(self, '_values', {})

    def set_value(self, key, value, field=None):
        """Store `value` under `key`, resolving its Field lazily.

        Once a key has a cached Field the table is not consulted again.
        (The previous dict.get-with-default form evaluated
        ``self._table[key]`` unconditionally, which both paid a useless
        lookup on every set and raised KeyError when updating a key that
        was registered with an explicit `field` but is not a table
        field.)
        """
        self._values[key] = value
        if key not in self._fields:
            self._fields[key] = field or self._table[key]

    def del_value(self, key):
        # Drop both the value and its cached Field.
        del self._values[key]
        del self._fields[key]

    def __getitem__(self, key):
        return self._values[key]

    def __setitem__(self, key, value):
        return self.set_value(key, value)

    def __delitem__(self, key):
        return self.del_value(key)

    def __getattr__(self, key):
        try:
            return self.__getitem__(key)
        except KeyError:
            # Include the key so the failure is debuggable.
            raise AttributeError(key)

    def __setattr__(self, key, value):
        return self.set_value(key, value)

    def __delattr__(self, key):
        return self.del_value(key)

    def __iter__(self):
        return self._values.__iter__()

    def __contains__(self, key):
        return key in self._values

    def keys(self):
        return self._values.keys()

    def iterkeys(self):
        return iterkeys(self._values)

    def values(self):
        return self._values.values()

    def itervalues(self):
        return itervalues(self._values)

    def items(self):
        return self._values.items()

    def iteritems(self):
        return iteritems(self._values)

    def op_values(self):
        """Return the [(Field, value), ...] pairs for this operation."""
        # Iterate the dict directly (valid on both PY2 and PY3) instead
        # of going through the compat iteritems helper.
        return [
            (self._fields[key], self._values[key])
            for key in self._values
        ]

    def __repr__(self):
        return '<OpRow %s>' % repr(self._values)


class Serializable(object):
def as_dict(self, flat=False, sanitize=True):
return self.__dict__
Expand Down
89 changes: 85 additions & 4 deletions pydal/helpers/methods.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
# -*- coding: utf-8 -*-
import uuid
import re

from .._compat import iteritems, integer_types
import os
import re
import uuid
from .._compat import (
PY2, BytesIO, iteritems, integer_types, string_types, to_bytes, pjoin,
exists
)
from .regex import REGEX_NOPASSWD, REGEX_UNPACK, REGEX_CONST_STRING, REGEX_W
from .classes import SQLCustomType
# from ..objects import Field, Table


PLURALIZE_RULES = [
Expand Down Expand Up @@ -339,3 +342,81 @@ def geoLine(*line):

def geoPolygon(*line):
return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)


# upload utils
def attempt_upload(table, fields):
    """Persist pending upload payloads found in `fields`.

    For every upload field of `table` present in `fields` whose value is
    not already a stored name (string/None/bytes-on-PY3), store the
    payload via the field's ``store()`` and replace the value with the
    resulting name. Mutates `fields` in place.

    :raises RuntimeError: when a value is none of the recognized payload
        shapes (cgi-style object, dict, or readable stream).
    """
    for fieldname in table._upload_fieldnames & set(fields):
        value = fields[fieldname]
        # Already-stored names (or explicit None) are left untouched.
        if value is None or isinstance(value, string_types):
            continue
        # On PY3, raw bytes are treated as already-stored content too.
        if not PY2 and isinstance(value, bytes):
            continue
        if hasattr(value, 'file') and hasattr(value, 'filename'):
            # cgi.FieldStorage-style upload object.
            new_name = table[fieldname].store(
                value.file, filename=value.filename)
        elif isinstance(value, dict):
            # {'data': ..., 'filename': ...} payload; anything else
            # clears the field.
            if 'data' in value and 'filename' in value:
                stream = BytesIO(to_bytes(value['data']))
                new_name = table[fieldname].store(
                    stream, filename=value['filename'])
            else:
                new_name = None
        elif hasattr(value, 'read') and hasattr(value, 'name'):
            # Plain file-like object carrying its own name.
            new_name = table[fieldname].store(value, filename=value.name)
        else:
            raise RuntimeError("Unable to handle upload")
        fields[fieldname] = new_name


def attempt_upload_on_insert(table):
    """Return a _before_insert callback that stores uploads for `table`.

    The returned callable receives only the pending fields, matching the
    insert-callback signature.
    """
    def store_uploads(fields):
        return attempt_upload(table, fields)
    return store_uploads


def attempt_upload_on_update(table):
    """Return a _before_update callback that stores uploads for `table`.

    The returned callable receives the Set being updated plus the
    pending fields, matching the update-callback signature; the Set is
    ignored.
    """
    def store_uploads(dbset, fields):
        return attempt_upload(table, fields)
    return store_uploads


def delete_uploaded_files(dbset, upload_fields=None):
    """Delete stored files for autodelete upload fields of `dbset`'s rows.

    Used as a delete/update callback: before the records matched by
    `dbset` change, remove the files referenced by their upload fields.

    :param dbset: the Set whose matching records are about to be
        deleted or updated.
    :param upload_fields: for updates, a mapping of fieldname -> new
        value; a file whose stored name equals the incoming value is
        kept. When None (delete), all of the table's fields are
        considered.
    :returns: always False.
    """
    # Resolve the (single) table targeted by the query via the adapter.
    table = dbset.db._adapter.tables(dbset.query).popitem()[1]
    # ## mind uploadfield==True means file is not in DB
    if upload_fields:
        fields = list(upload_fields)
        # Explicitly add compute upload fields (ex: thumbnail)
        fields += [f for f in table.fields if table[f].compute is not None]
    else:
        fields = table.fields
    # Only filesystem-stored (uploadfield == True), autodelete upload
    # fields need cleanup here; DB-stored blobs go away with the row.
    fields = [
        f for f in fields if table[f].type == 'upload' and
        table[f].uploadfield == True and table[f].autodelete
    ]
    if not fields:
        return False
    for record in dbset.select(*[table[f] for f in fields]):
        for fieldname in fields:
            field = table[fieldname]
            oldname = record.get(fieldname, None)
            if not oldname:
                continue
            # On update: unchanged name means the file is still in use.
            if upload_fields and fieldname in upload_fields and \
                    oldname == upload_fields[fieldname]:
                continue
            if field.custom_delete:
                # Delegate removal entirely to the user-supplied hook.
                field.custom_delete(oldname)
            else:
                uploadfolder = field.uploadfolder
                if not uploadfolder:
                    # Default location relative to the adapter's folder.
                    uploadfolder = pjoin(
                        dbset.db._adapter.folder, '..', 'uploads')
                if field.uploadseparate:
                    # Sharded layout: stored names look dot-separated
                    # ("<table>.<field>.<key>..."); the first two chars
                    # of the third component pick the subfolder —
                    # presumably mirrors Field.store()'s layout; verify
                    # against that implementation.
                    items = oldname.split('.')
                    uploadfolder = pjoin(
                        uploadfolder, "%s.%s" %
                        (items[0], items[1]), items[2][:2])
                oldpath = pjoin(uploadfolder, oldname)
                if exists(oldpath):
                    os.unlink(oldpath)
    return False

0 comments on commit e49dd9b

Please sign in to comment.