Commit eb40a0d

- Cleaned up get_collection*() API a bit and changed as much code as
  possible to use those APIs.

- Documented _m_remove_documents in IMongoContainer interface.
strichter committed Mar 11, 2012
1 parent 1418819 commit eb40a0d
Showing 10 changed files with 81 additions and 28 deletions.
3 changes: 2 additions & 1 deletion CHANGES.txt
@@ -38,7 +38,8 @@ CHANGES
* Implemented a flushing policy: Changes are always flushed before any query
is made. A simple wrapper for the ``pymongo`` collection
(``CollectionWrapper``) ensures that flush is called before the correct
method calls. The new API method ``DataManager.get_collection(obj)``
method calls. Two new API methods ``DataManager.get_collection(db_name,
coll_name)`` and ``DataManager.get_collection_from_object(obj)``
allow one to quickly get a wrapped collection.

- The ``MongoContainer`` class now removes objects from the database upon
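
For illustration, here is a minimal sketch of how the two new data manager
accessors described above are meant to be used. The connection settings, the
``person`` collection name and the commented-out object lookup are
assumptions for the example, not part of this commit:

    # Minimal usage sketch (assumes a local mongod and the test fixtures).
    import pymongo
    from mongopersist import datamanager

    conn = pymongo.Connection('localhost', 27017)
    dm = datamanager.MongoDataManager(
        conn, default_database='mongopersist_test')

    # 1. Look up a wrapped collection directly by DB and collection name.
    coll = dm.get_collection('mongopersist_test', 'person')

    # 2. Or derive it from a persistent object; the data manager determines
    #    the database/collection names from the object's class.
    # coll = dm.get_collection_from_object(foo)

    # Both return a CollectionWrapper, so pending changes are flushed before
    # any query method such as find() or find_one() hits MongoDB.
    print tuple(coll.find())
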
27 changes: 17 additions & 10 deletions src/mongopersist/datamanager.py
@@ -99,7 +99,8 @@ def __setitem__(self, key, value):
def __delitem__(self, key):
doc = self._collection_inst.find_one(
processSpec(self._collection_inst, {'name': key}))
coll = self._jar._conn[doc['ref'].database][doc['ref'].collection]
coll = self._jar._get_collection(
doc['ref'].database, doc['ref'].collection)
coll.remove(doc['ref'].id)
self._collection_inst.remove({'name': key})

@@ -140,10 +141,13 @@ def __init__(self, conn, detect_conflicts=None, default_database=None,
self.transaction_manager = transaction.manager
self.root = Root(self, root_database, root_collection)

def _get_collection(self, obj):
db_name, coll_name = self._writer.get_collection_name(obj)
def _get_collection(self, db_name, coll_name):
return self._conn[db_name][coll_name]

def _get_collection_from_object(self, obj):
db_name, coll_name = self._writer.get_collection_name(obj)
return self._get_collection(db_name, coll_name)

def _check_conflicts(self):
if not self.detect_conflicts:
return
@@ -154,7 +158,7 @@ def _check_conflicts(self):
# cannot be a conflict.
if obj._p_oid is None:
continue
coll = self._get_collection(obj)
coll = self._get_collection_from_object(obj)
new_doc = coll.find_one(obj._p_oid.id, fields=('_py_serial',))
if new_doc is None:
continue
@@ -175,8 +179,11 @@ def _flush_objects(self):
self._writer.store(obj)
written.append(obj)

def get_collection(self, obj):
return CollectionWrapper(self._get_collection(obj), self)
def get_collection(self, db_name, coll_name):
return CollectionWrapper(self._get_collection(db_name, coll_name), self)

def get_collection_from_object(self, obj):
return CollectionWrapper(self._get_collection_from_object(obj), self)

def dump(self, obj):
return self._writer.store(obj)
@@ -221,7 +228,7 @@ def remove(self, obj):
if obj._p_changed is None:
self.setstate(obj)
# Now we remove the object from Mongo.
coll = self._get_collection(obj)
coll = self._get_collection_from_object(obj)
coll.remove({'_id': obj._p_oid.id})
self._removed_objects.append(obj)
# Just in case the object was modified before removal, let's remove it
@@ -258,16 +265,16 @@ def abort(self, transaction):
# Aborting the transaction requires three steps:
# 1. Remove any inserted objects.
for obj in self._inserted_objects:
coll = self._get_collection(obj)
coll = self._get_collection_from_object(obj)
coll.remove({'_id': obj._p_oid.id})
# 2. Re-insert any removed objects.
for obj in self._removed_objects:
coll = self._get_collection(obj)
coll = self._get_collection_from_object(obj)
coll.insert(self._original_states[obj._p_oid])
del self._original_states[obj._p_oid]
# 3. Reset any changed states.
for db_ref, state in self._original_states.items():
coll = self._conn[db_ref.database][db_ref.collection]
coll = self._get_collection(db_ref.database, db_ref.collection)
coll.update({'_id': db_ref.id}, state, True)
self.reset()

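
The ``CollectionWrapper`` returned by the two public accessors is not part of
this hunk, so the following is only a rough sketch of the flush-before-query
idea it implements; everything except the ``collection`` attribute and the
``_flush_objects()`` hook shown above is an assumption for illustration:

    # Rough sketch of a flushing collection wrapper (not the actual
    # mongopersist implementation).
    class FlushingCollectionWrapper(object):

        # Methods that read from MongoDB and therefore must see flushed data.
        QUERY_METHODS = ('find', 'find_one', 'count')

        def __init__(self, collection, jar):
            self.collection = collection   # raw pymongo collection
            self._jar = jar                # the owning data manager

        def __getattr__(self, name):
            if name in self.QUERY_METHODS:
                # Write out pending object changes so the query reflects the
                # current state of the transaction.
                self._jar._flush_objects()
            return getattr(self.collection, name)
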
5 changes: 4 additions & 1 deletion src/mongopersist/interfaces.py
@@ -129,7 +129,10 @@ class IMongoDataManager(persistent.interfaces.IPersistentDataManager):
detect_conflicts = zope.interface.Attribute(
"""A flag, when set it enables write conflict detection.""")

def get_collection(obj):
def get_collection(db_name, coll_name):
"""Return the collection for the given DB and collection names."""

def get_collection_from_object(obj):
"""Return the collection for an object."""

def reset():
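
Because these are ``zope.interface`` declarations, conformance of a concrete
data manager can be checked mechanically. A small sketch, assuming that
``MongoDataManager`` declares that it implements ``IMongoDataManager`` and
reusing the connection settings from the earlier example:

    import pymongo
    from zope.interface.verify import verifyObject
    from mongopersist import datamanager, interfaces

    conn = pymongo.Connection('localhost', 27017)
    dm = datamanager.MongoDataManager(
        conn, default_database='mongopersist_test')

    # verifyObject raises an error if a declared method or attribute is
    # missing or if a method signature does not match the interface.
    verifyObject(interfaces.IMongoDataManager, dm)
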
2 changes: 1 addition & 1 deletion src/mongopersist/mapping.py
@@ -31,7 +31,7 @@ def __mongo_filter__(self):

def get_mongo_collection(self):
db_name = self.__mongo_database__ or self._m_jar.default_database
return self._m_jar._conn[db_name][self.__mongo_collection__]
return self._m_jar.get_collection(db_name, self.__mongo_collection__)

def __getitem__(self, key):
filter = self.__mongo_filter__()
10 changes: 6 additions & 4 deletions src/mongopersist/serialize.py
@@ -212,7 +212,7 @@ def get_state(self, obj, seen=None):

def store(self, obj, ref_only=False):
db_name, coll_name = self.get_collection_name(obj)
coll = self._jar._conn[db_name][coll_name]
coll = self._jar._get_collection(db_name, coll_name)
if ref_only:
# We only want to get OID quickly. Trying to reduce the full state
might cause an infinite recursion loop. (Example: 2 new objects
@@ -281,8 +281,9 @@ def resolve(self, dbref):
raise ImportError(dbref)
# Multiple object types are stored in the collection. We have to
# look at the object to find out the type.
obj_doc = self._jar._conn[dbref.database][dbref.collection].find_one(
dbref.id, fields=('_py_persistent_type',))
coll = self._jar._get_collection(dbref.database, dbref.collection)
obj_doc = coll.find_one(dbref.id, fields=('_py_persistent_type',))
if '_py_persistent_type' in obj_doc:
klass = self.simple_resolve(obj_doc['_py_persistent_type'])
else:
@@ -375,7 +376,8 @@ def get_object(self, state, obj):
def set_ghost_state(self, obj, doc=None):
# Look up the object state by coll_name and oid.
if doc is None:
coll = self._jar._conn[obj._p_oid.database][obj._p_oid.collection]
coll = self._jar._get_collection(
obj._p_oid.database, obj._p_oid.collection)
doc = coll.find_one({'_id': obj._p_oid.id})
doc.pop('_id')
doc.pop('_py_persistent_type', None)
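
To make the metadata lookup in ``resolve()`` above concrete, here is a small
self-contained sketch of the projection it performs. The document layout and
the ``person`` collection are assumptions for illustration; only the
``_py_persistent_type`` field name is taken from the code above:

    import pymongo

    conn = pymongo.Connection('localhost', 27017)
    coll = conn['mongopersist_test']['person']

    # Store a document shaped roughly like the ones mongopersist writes
    # (the exact layout here is an assumption).
    oid = coll.insert({'name': u'Stephan',
                       '_py_persistent_type': u'mymodule.Person'})

    # resolve() only needs the type marker, so it projects down to that one
    # field (pymongo returns _id by default as well).
    doc = coll.find_one(oid, fields=('_py_persistent_type',))
    print doc  # {u'_id': ObjectId(...), u'_py_persistent_type': u'mymodule.Person'}
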
47 changes: 39 additions & 8 deletions src/mongopersist/tests/test_datamanager.py
@@ -81,22 +81,53 @@ def doctest_Root():
"""

def doctest_MongoDataManager_get_collection():
r"""MongoDataManager: get_collection(obj)
r"""MongoDataManager: get_collection(db_name, coll_name)
Get the collection given the DB and collection name.
>>> foo = Foo('1')
>>> foo_ref = dm.insert(foo)
>>> dm.reset()
>>> coll = dm.get_collection(
... DBNAME, 'mongopersist.tests.test_datamanager.Foo')
We are returning a collection wrapper instead of the raw pymongo collection,
so that we can flush the data before any method involving a query is executed.
>>> coll
<mongopersist.datamanager.CollectionWrapper object at 0x19e47d0>
>>> coll.collection
Collection(Database(Connection('localhost', 27017), u'mongopersist_test'),
u'mongopersist.tests.test_datamanager.Foo')
Let's now make a query:
>>> tuple(coll.find())
({u'_id': ObjectId('4f5c1bf537a08e2ea6000000'), u'name': u'1'},)
"""

def doctest_MongoDataManager_get_collection_from_object():
r"""MongoDataManager: get_collection_from_object(obj)
Get the collection for an object.
>>> foo = Foo('1')
>>> foo_ref = dm.insert(foo)
>>> dm.reset()
>>> coll = dm.get_collection(foo)
>>> coll = dm.get_collection_from_object(foo)
We are returning a collection wrapper instead of the raw pymongo collection,
so that we can flush the data before any method involving a query is executed.
>>> coll
<mongopersist.datamanager.CollectionWrapper object at 0x19e47d0>
>>> coll.collection
Collection(Database(Connection('localhost', 27017), u'mongopersist_test'),
u'mongopersist.tests.test_datamanager.Foo')
Let's make sure that modifying attributes is done on the original
collection:
@@ -115,18 +146,18 @@ def doctest_MongoDataManager_get_collection():
If we do not use the wrapper, the change is not visible:
>>> tuple(dm._get_collection(foo_new).find())
>>> tuple(dm._get_collection_from_object(foo_new).find())
({u'_id': ObjectId('4f5c1bf537a08e2ea6000000'), u'name': u'1'},)
But if we use the wrapper, the change gets flushed first:
>>> tuple(dm.get_collection(foo_new).find())
>>> tuple(dm.get_collection_from_object(foo_new).find())
({u'_id': ObjectId('4f5c1bf537a08e2ea6000000'), u'name': u'2'},)
Of course, aborting the transaction gets us back to the original state:
>>> dm.abort(transaction.get())
>>> tuple(dm._get_collection(foo_new).find())
>>> tuple(dm._get_collection_from_object(foo_new).find())
({u'_id': ObjectId('4f5c1bf537a08e2ea6000000'), u'name': u'1'},)
"""

@@ -287,7 +318,7 @@ def doctest_MongoDataManager_insert():
But storing works as expected (flush is implicit before find):
>>> tuple(dm.get_collection(foo2).find())
>>> tuple(dm.get_collection_from_object(foo2).find())
({u'_id': ObjectId('4f5c443837a08e37bf000000'), u'name': u'foo'},
{u'_id': ObjectId('4f5c443837a08e37bf000001'), u'name': u'Foo 2'})
"""
@@ -308,7 +339,7 @@ def doctest_MongoDataManager_remove():
The object is removed from the collection immediately:
>>> tuple(dm._get_collection(foo_ref).find())
>>> tuple(dm._get_collection_from_object(foo_ref).find())
()
Also, the object is added to the list of removed objects:
@@ -427,7 +458,7 @@ def doctest_MongoDataManager_abort():
>>> foo2_ref = dm.insert(Foo('two'))
>>> dm.reset()
>>> coll = dm._get_collection(Foo())
>>> coll = dm._get_collection_from_object(Foo())
>>> tuple(coll.find({}))
({u'_id': ObjectId('4f5c114f37a08e2cac000000'), u'name': u'one'},
{u'_id': ObjectId('4f5c114f37a08e2cac000001'), u'name': u'two'})
3 changes: 3 additions & 0 deletions src/mongopersist/tests/test_mapping.py
@@ -70,6 +70,9 @@ def doctest_MongoCollectionMapping_simple():
managing:
>>> container.get_mongo_collection()
<mongopersist.datamanager.CollectionWrapper object at 0x001122>
>>> container.get_mongo_collection().collection
Collection(Database(Connection('localhost', 27017),
u'mongopersist_test'),
u'mongopersist.tests.test_mapping.Item')
3 changes: 1 addition & 2 deletions src/mongopersist/zope/container.py
@@ -128,8 +128,7 @@ def _m_jar(self):

def get_collection(self):
db_name = self._m_database or self._m_jar.default_database
return CollectionWrapper(
self._m_jar._conn[db_name][self._m_collection], self._m_jar)
return self._m_jar.get_collection(db_name, self._m_collection)

def _m_get_parent_key_value(self):
if getattr(self, '_p_jar', None) is None:
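
Routing ``get_collection()`` through the jar here (and in ``mapping.py``
above) means container-level queries now also go through the flushing
wrapper. A quick sketch of the equivalence, with a configured ``container``
assumed to exist:

    # Sketch: both paths refer to the same underlying pymongo collection.
    wrapped = container.get_collection()          # CollectionWrapper
    db_name = container._m_database or container._m_jar.default_database
    raw = container._m_jar._get_collection(db_name, container._m_collection)

    # pymongo collections compare equal when database and name match.
    assert wrapped.collection == raw
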
7 changes: 7 additions & 0 deletions src/mongopersist/zope/interfaces.py
@@ -44,6 +44,13 @@ class IMongoContainer(zope.interface.Interface):
u'the parent/container reference.'),
default='parent')

_m_remove_documents = zope.schema.Bool(
title=u'Remove Documents',
description=(
u'A flag that, when set, causes documents to be removed from the DB '
u'when they are removed from the container.'),
default=True)

def _m_get_parent_key_value():
"""Returns the value that is used to specify a particular container as
the parent of the item.
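
To make the documented semantics of ``_m_remove_documents`` concrete, here is
a rough illustration of how a container deletion could honor the flag. This is
not the actual ``MongoContainer`` code, and the ``_m_parent_key`` attribute
name is an assumption:

    # Illustration only: honoring _m_remove_documents on deletion.
    def delete_item(container, key):
        obj = container[key]
        if container._m_remove_documents:
            # Default (True): also remove the backing document from MongoDB.
            container._m_jar.remove(obj)
        else:
            # Keep the document, but detach it from this container by
            # clearing the parent reference attribute.
            setattr(obj, container._m_parent_key, None)
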
2 changes: 1 addition & 1 deletion src/mongopersist/zope/tests/test_container.py
@@ -545,7 +545,7 @@ def doctest_MongoContainer_with_ZODB():
>>> stephan.__parent__
<mongopersist.zope.container.MongoContainer object at 0x7f6b6273b7d0>
>>> pprint(list(dm._conn[DBNAME]['person'].find()))
>>> pprint(list(dm._get_collection(DBNAME, 'person').find()))
[{u'_id': ObjectId('4e7ed795e1382366a0000001'),
u'key': u'stephan',
u'name': u'Stephan',
