Skip to content
This repository has been archived by the owner on May 13, 2020. It is now read-only.

Commit

Permalink
* Added some more tests to improve test coverage.
Browse files Browse the repository at this point in the history
* Renamed processSpec() to process_spec(), since we are using PEP8 naming.

* Added ProcessSpecDecorator.

* Added processSpec to the CollectionWrapper. Removed manual calls to
  processSpec() and made sure the wrapper is used everywhere.
  • Loading branch information
strichter committed Mar 13, 2012
1 parent b5c4057 commit 0583763
Show file tree
Hide file tree
Showing 5 changed files with 156 additions and 29 deletions.
5 changes: 3 additions & 2 deletions src/mongopersist/README.txt
Expand Up @@ -115,8 +115,9 @@ Let's now add an address for Stephan. Addresses are also persistent objects:
MongoPersist supports a special attribute called ``_p_mongo_collection``,
which allows you to specify a custom collection to use.

>>> dm.root['stephan'].address = Address('Maynard', '01754')
>>> dm.root['stephan'].address
>>> stephan = dm.root['stephan']
>>> stephan.address = Address('Maynard', '01754')
>>> stephan.address
<Address Maynard (01754)>

Note that the address is not immediately saved in the database:
Expand Down
41 changes: 32 additions & 9 deletions src/mongopersist/datamanager.py
Expand Up @@ -27,7 +27,7 @@ def create_conflict_error(obj, new_doc):
None, obj,
(new_doc.get('_py_serial', 0), serialize.u64(obj._p_serial)))

def processSpec(collection, spec):
def process_spec(collection, spec):
try:
adapter = interfaces.IMongoSpecProcessor(None)
except TypeError:
Expand All @@ -46,10 +46,32 @@ def __call__(self, *args, **kwargs):
self.datamanager.flush()
return self.function(*args, **kwargs)

class ProcessSpecDecorator(object):
    """Run ``process_spec()`` over the query spec before delegating to the
    wrapped collection method.

    The spec may be passed positionally (first argument) or under one of
    the keyword names used by ``find()`` (``spec``), ``find_one()``
    (``spec_or_id``) and ``find_and_modify()`` (``query``).
    """

    def __init__(self, collection, function):
        # Collection the spec applies to and the method being wrapped.
        self.collection = collection
        self.function = function

    def __call__(self, *args, **kwargs):
        if args:
            # A positional spec is always the first argument.
            processed = process_spec(self.collection, args[0])
            args = (processed,) + args[1:]
        # Only the first matching keyword is processed, mirroring the
        # find() / find_one() / find_and_modify() signatures respectively.
        for keyword in ('spec', 'spec_or_id', 'query'):
            if keyword in kwargs:
                kwargs[keyword] = process_spec(
                    self.collection, kwargs[keyword])
                break
        return self.function(*args, **kwargs)

class CollectionWrapper(object):

QUERY_METHODS = ['group', 'map_reduce', 'inline_map_reduce', 'find_one',
'find', 'count', 'find_and_modify']
'find', 'find_and_modify']
PROCESS_SPEC_METHODS = ['find_and_modify', 'find_one', 'find']

def __init__(self, collection, datamanager):
self.__dict__['collection'] = collection
Expand All @@ -59,6 +81,8 @@ def __getattr__(self, name):
attr = getattr(self.collection, name)
if name in self.QUERY_METHODS:
attr = FlushDecorator(self._datamanager, attr)
if name in self.PROCESS_SPEC_METHODS:
attr = ProcessSpecDecorator(self.collection, attr)
return attr

def __setattr__(self, name, value):
Expand All @@ -80,26 +104,24 @@ def __init__(self, jar, database=None, collection=None):
if collection is not None:
self.collection = collection
db = self._jar._conn[self.database]
self._collection_inst = db[self.collection]
self._collection_inst = CollectionWrapper(db[self.collection], jar)

def __getitem__(self, key):
doc = self._collection_inst.find_one(
processSpec(self._collection_inst, {'name': key}))
doc = self._collection_inst.find_one({'name': key})
if doc is None:
raise KeyError(key)
return self._jar.load(doc['ref'])

def __setitem__(self, key, value):
dbref = self._jar.dump(value)
dbref = self._jar.insert(value)
if self.get(key) is not None:
del self[key]
doc = {'ref': dbref, 'name': key}
self._collection_inst.insert(doc)

def __delitem__(self, key):
doc = self._collection_inst.find_one(
processSpec(self._collection_inst, {'name': key}))
coll = self._jar._get_collection(
doc = self._collection_inst.find_one({'name': key})
coll = self._jar.get_collection(
doc['ref'].database, doc['ref'].collection)
coll.remove(doc['ref'].id)
self._collection_inst.remove({'name': key})
Expand Down Expand Up @@ -190,6 +212,7 @@ def dump(self, obj):
if obj in self._registered_objects:
obj._p_changed = False
self._registered_objects.remove(obj)
return res

def load(self, dbref):
    """Return a ghost object for *dbref*, resolved via the object reader."""
    reader = self._reader
    return reader.get_ghost(dbref)
Expand Down
5 changes: 2 additions & 3 deletions src/mongopersist/mapping.py
Expand Up @@ -16,7 +16,6 @@
import pymongo

from mongopersist import interfaces
from mongopersist.datamanager import processSpec

class MongoCollectionMapping(UserDict.DictMixin, object):
__mongo_database__ = None
Expand All @@ -37,7 +36,7 @@ def __getitem__(self, key):
filter = self.__mongo_filter__()
filter[self.__mongo_mapping_key__] = key
coll = self.get_mongo_collection()
doc = coll.find_one(processSpec(coll, filter))
doc = coll.find_one(filter)
if doc is None:
raise KeyError(key)
db_name = self.__mongo_database__ or self._m_jar.default_database
Expand All @@ -64,4 +63,4 @@ def keys(self):
coll = self.get_mongo_collection()
return [
doc[self.__mongo_mapping_key__]
for doc in coll.find(processSpec(coll, filter))]
for doc in coll.find(filter)]
124 changes: 115 additions & 9 deletions src/mongopersist/tests/test_datamanager.py
Expand Up @@ -188,6 +188,22 @@ def doctest_MongoDataManager_object_dump_load_reset():
ObjectId('4eb2eb7437a08e0156000000'),
'mongopersist_test')
When the object is modified, ``dump()`` will remove it from the list of
registered objects.
>>> foo.name = 'Foo'
>>> foo._p_changed
True
>>> dm._registered_objects
[<mongopersist.tests.test_datamanager.Foo object at 0x2fe1f50>]
>>> foo_ref = dm.dump(foo)
>>> foo._p_changed
False
>>> dm._registered_objects
[]
Let's now reset the data manager, so we do not hit a cache while loading
the object again:
Expand Down Expand Up @@ -641,17 +657,23 @@ def doctest_MongoDataManager_sortKey():
('MongoDataManager', 0)
"""

def doctest_processSpec():
r"""processSpec(): General test
def doctest_process_spec():
r"""process_spec(): General test
A simple helper function that returns the spec itself if no
IMongoSpecProcessor adapter is registered.
``IMongoSpecProcessor`` adapter is registered. If a processor is found it
is applied. The spec processor can be used for:
* Additional logging.
* Modifying the spec, for example providing additional parameters.
Let's now call the function:
>>> from zope.testing.cleanup import CleanUp as PlacelessSetup
>>> PlacelessSetup().setUp()
>>> datamanager.processSpec('a_collection', {'life': 42})
>>> datamanager.process_spec('a_collection', {'life': 42})
{'life': 42}
Now let's register an adapter
Expand All @@ -665,21 +687,105 @@ def doctest_processSpec():
>>> import zope.interface
>>> from zope.component import provideAdapter
>>> provideAdapter(Processor, (zope.interface.Interface,), interfaces.IMongoSpecProcessor)
>>> provideAdapter(
... Processor,
... (zope.interface.Interface,), interfaces.IMongoSpecProcessor)
And see what happens on processSpec:
And see what happens on calling ``process_spec()``:
>>> datamanager.processSpec('a_collection', {'life': 42})
>>> datamanager.process_spec('a_collection', {'life': 42})
passed in: a_collection {'life': 42}
{'life': 24}
We get the processed spec in return.
>>> PlacelessSetup().tearDown()
"""

# Doctest (collected via doctest.DocTestSuite in test_suite()); the docstring
# below is the test itself. Relies on the doctest globs `Foo`, `dm`, `conn`,
# `DBNAME` and `datamanager` supplied by the testing setUp.
# NOTE(review): doctest whitespace was reconstructed from a mangled paste —
# confirm blank lines/indentation against the repository copy.
def doctest_FlushDecorator_basic():
    r"""class FlushDecorator: basic functionality

    The FlushDecorator class can be used to ensure that data is flushed before
    a given function is called. Let's create an object and modify it:

      >>> foo = Foo('foo')
      >>> foo_ref = dm.dump(foo)

      >>> dm.reset()
      >>> foo_new = dm.load(foo._p_oid)
      >>> foo_new.name = 'Foo'

    The database is not immediately updated:

      >>> coll = conn[DBNAME]['mongopersist.tests.test_datamanager.Foo']
      >>> list(coll.find())
      [{u'_id': ObjectId('4e7ddf12e138237403000000'), u'name': u'foo'}]

    But when I use the decorator, all outstanding changes are updated at
    first:

      >>> flush_find = datamanager.FlushDecorator(dm, coll.find)
      >>> list(flush_find())
      [{u'_id': ObjectId('4e7ddf12e138237403000000'), u'name': u'Foo'}]
    """

# Doctest (collected via doctest.DocTestSuite in test_suite()); the docstring
# below is the test itself. Relies on the doctest globs `conn`, `DBNAME`,
# `interfaces` and `datamanager` supplied by the testing setUp.
# NOTE(review): doctest whitespace was reconstructed from a mangled paste —
# confirm blank lines/indentation against the repository copy.
def doctest_ProcessSpecDecorator_basic():
    r"""class ProcessSpecDecorator: basic

    The ``ProcessSpecDecorator`` decorator processes the spec before passing
    it to the function. Currently the following collection methods are
    supported: ``find_one()``, ``find()``, ``find_and_modify``.

    Now let's register an adapter

      >>> from zope.testing.cleanup import CleanUp as PlacelessSetup
      >>> PlacelessSetup().setUp()

      >>> class Processor(object):
      ...     def __init__(self, context):
      ...         pass
      ...     def process(self, collection, spec):
      ...         print 'passed in:', spec
      ...         return spec

      >>> import zope.interface
      >>> from zope.component import provideAdapter
      >>> provideAdapter(
      ...     Processor,
      ...     (zope.interface.Interface,), interfaces.IMongoSpecProcessor)

    Let's now create the decorator:

      >>> coll = conn[DBNAME]['mongopersist.tests.test_datamanager.Foo']
      >>> process_find = datamanager.ProcessSpecDecorator(coll, coll.find)
      >>> list(process_find({'life': 42}))
      passed in: {'life': 42}
      []

    Keyword arguments are also supported:

      >>> process_find = datamanager.ProcessSpecDecorator(coll, coll.find)
      >>> list(process_find(spec={'life': 42}))
      passed in: {'life': 42}
      []

      >>> process_find_one = datamanager.ProcessSpecDecorator(
      ...     coll, coll.find_one)
      >>> process_find_one(spec_or_id={'life': 42})
      passed in: {'life': 42}

      >>> process_find_one = datamanager.ProcessSpecDecorator(
      ...     coll, coll.find_one)
      >>> process_find_one(query={'life': 42})
      passed in: {'life': 42}

    We get the processed spec in return.

      >>> PlacelessSetup().tearDown()
    """

def test_suite():
return doctest.DocTestSuite(
setUp=testing.setUp, tearDown=testing.tearDown,
Expand Down
10 changes: 4 additions & 6 deletions src/mongopersist/zope/container.py
Expand Up @@ -22,7 +22,6 @@

from mongopersist import interfaces, serialize
from mongopersist.zope import interfaces as zinterfaces
from mongopersist.datamanager import processSpec, CollectionWrapper

class MongoContained(contained.Contained):

Expand Down Expand Up @@ -165,7 +164,7 @@ def __getitem__(self, key):
filter = self._m_get_items_filter()
filter[self._m_mapping_key] = key
coll = self.get_collection()
doc = coll.find_one(processSpec(coll, filter))
doc = coll.find_one(filter)
if doc is None:
raise KeyError(key)
return self._load_one(doc)
Expand Down Expand Up @@ -211,15 +210,14 @@ def keys(self):
filter[self._m_mapping_key] = {'$ne': None}
coll = self.get_collection()
return [doc[self._m_mapping_key]
for doc in coll.find(processSpec(coll, filter),
fields=(self._m_mapping_key,))]
for doc in coll.find(filter, fields=(self._m_mapping_key,))]

def raw_find(self, spec=None, *args, **kwargs):
if spec is None:
spec = {}
spec.update(self._m_get_items_filter())
coll = self.get_collection()
return coll.find(processSpec(coll, spec), *args, **kwargs)
return coll.find(spec, *args, **kwargs)

def find(self, spec=None, *args, **kwargs):
# Search for matching objects.
Expand All @@ -235,7 +233,7 @@ def raw_find_one(self, spec_or_id=None, *args, **kwargs):
spec_or_id = {'_id': spec_or_id}
spec_or_id.update(self._m_get_items_filter())
coll = self.get_collection()
return coll.find_one(processSpec(coll, spec_or_id), *args, **kwargs)
return coll.find_one(spec_or_id, *args, **kwargs)

def find_one(self, spec_or_id=None, *args, **kwargs):
doc = self.raw_find_one(spec_or_id, *args, **kwargs)
Expand Down

0 comments on commit 0583763

Please sign in to comment.