Add _key and _rev to the docs before returning them from store.
Until now it was not possible to find the key of a document once it was
fetched from the store. Adding _key to the returned document solves this issue.
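
For example, a small illustration of the new behaviour (not part of this commit;
the exact _rev value depends on the row id of the stored document):

    store.put("counter", {"value": 1})
    doc = store.get("counter")
    # doc now carries the key and revision along with the data, e.g.
    # {"value": 1, "_key": "counter", "_rev": "42"}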

_rev is like an e-tag added to the document. It is not used yet, but
eventually it will be used to enforce consistency and detect conflicts.
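
As an illustration of that eventual use, here is a minimal sketch of a conflict
check built on top of the existing get_row and put_json; StoreConflict and
put_json_with_rev_check are hypothetical names, not part of this commit:

    import simplejson

    class StoreConflict(Exception):
        """Raised when the caller's _rev no longer matches the stored row (hypothetical)."""

    def put_json_with_rev_check(store, key, json):
        # Hypothetical wrapper around store.put_json: reject the write if the
        # document was updated by someone else after it was read.
        d = simplejson.loads(json)
        rev = d.pop("_rev", None)
        d.pop("_key", None)

        row = store.get_row(key)
        if row is not None and rev is not None and str(row.id) != str(rev):
            raise StoreConflict("document %r was modified by someone else" % key)

        store.put_json(key, simplejson.dumps(d))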
anandology committed Apr 12, 2011
1 parent 73dcd40 commit 81af4eb
Showing 3 changed files with 54 additions and 43 deletions.
27 changes: 20 additions & 7 deletions infogami/infobase/_dbstore/store.py
@@ -58,17 +58,30 @@ def fire_event(self, name, data):
self.listener and self.listener(name, data)

def get_json(self, key):
row = self.get_row(key)
return row and row.json
d = self.get(key)
return d and simplejson.dumps(d)

def get(self, key):
json = self.get_json(key)
return json and simplejson.loads(json)
row = self.get_row(key)
return row and self._get_doc(row)

def _get_doc(self, row):
doc = simplejson.loads(row.json)
doc['_key'] = row.key
doc['_rev'] = str(row.id)
return doc

def put(self, key, data):
self.put_json(key, simplejson.dumps(data))

def put_json(self, key, json):
# remove _key and _rev from json
# Eventually, we should allow put to succeed only if _rev == row.id
d = simplejson.loads(json)
d.pop("_key", None)
d.pop("_rev", None)
json = simplejson.dumps(d)

tx = self.db.transaction()
try:
row = self.get_row(key, for_update=True)
@@ -114,7 +127,7 @@ def query(self, type, name, value, limit=100, offset=0, include_docs=False):
All the documents are returned when the type is None.
"""
if type is None:
rows = self.db.select("store", what="key, json", limit=limit, offset=offset, order="store.id desc", vars=locals())
rows = self.db.select("store", what="store.*", limit=limit, offset=offset, order="store.id desc", vars=locals())
else:
tables = ["store", "store_index"]
wheres = ["store.id = store_index.store_id", "type = $type"]
@@ -123,11 +136,11 @@ def query(self, type, name, value, limit=100, offset=0, include_docs=False):
wheres.append("name='_key'")
else:
wheres.append("name=$name AND value=$value")
rows = self.db.select(tables, what='store.key, store.json', where=" AND ".join(wheres), limit=limit, offset=offset, order="store.id desc", vars=locals())
rows = self.db.select(tables, what='store.*', where=" AND ".join(wheres), limit=limit, offset=offset, order="store.id desc", vars=locals())

def process_row(row):
if include_docs:
return {"key": row.key, "doc": simplejson.loads(row.json)}
return {"key": row.key, "doc": self._get_doc(row)}
else:
return {"key": row.key}

37 changes: 16 additions & 21 deletions infogami/infobase/tests/test_client.py
@@ -71,7 +71,7 @@ class TestStore:
def setup_method(self, method):
s.clear()

def test_getitem(self):
def test_getitem(self, wildcard):
try:
s["x"]
except KeyError:
@@ -80,10 +80,10 @@ def test_getitem(self):
assert False, "should raise KeyError"

s["x"] = {"name": "x"}
assert s["x"] == {"name": "x"}
assert s["x"] == {"name": "x", "_key": "x", "_rev": wildcard}

s["x"] = {"name": "xx"}
assert s["x"] == {"name": "xx"}
assert s["x"] == {"name": "xx", "_key": "x", "_rev": wildcard}

def test_contains(self):
assert "x" not in s
@@ -116,7 +116,7 @@ def srange(*args):
assert s.keys() == srange(100, 200)[::-1]
assert list(s.keys(limit=-1)) == srange(200)[::-1]

def test_key_value_items(self):
def test_key_value_items(self, wildcard):
s["x"] = {"type": "foo", "name": "x"}
s["y"] = {"type": "bar", "name": "y"}
s["z"] = {"type": "bar", "name": "z"}
@@ -126,36 +126,31 @@ def test_key_value_items(self):
assert s.keys(type='bar', name="name", value="y") == ["y"]

assert s.values() == [
{"type": "bar", "name": "z"},
{"type": "bar", "name": "y"},
{"type": "foo", "name": "x"}
{"type": "bar", "name": "z", "_key": "z", "_rev": wildcard},
{"type": "bar", "name": "y", "_key": "y", "_rev": wildcard},
{"type": "foo", "name": "x", "_key": "x", "_rev": wildcard}
]
assert s.values(type='bar') == [
{"type": "bar", "name": "z"},
{"type": "bar", "name": "y"}
{"type": "bar", "name": "z", "_key": "z", "_rev": wildcard},
{"type": "bar", "name": "y", "_key": "y", "_rev": wildcard}
]
assert s.values(type='bar', name="name", value="y") == [
{"type": "bar", "name": "y"}
{"type": "bar", "name": "y", "_key": "y", "_rev": wildcard}
]

assert s.items() == [
("z", {"type": "bar", "name": "z"}),
("y", {"type": "bar", "name": "y"}),
("x", {"type": "foo", "name": "x"})
("z", {"type": "bar", "name": "z", "_key": "z", "_rev": wildcard}),
("y", {"type": "bar", "name": "y", "_key": "y", "_rev": wildcard}),
("x", {"type": "foo", "name": "x", "_key": "x", "_rev": wildcard})
]
assert s.items(type='bar') == [
("z", {"type": "bar", "name": "z"}),
("y", {"type": "bar", "name": "y"}),
("z", {"type": "bar", "name": "z", "_key": "z", "_rev": wildcard}),
("y", {"type": "bar", "name": "y", "_key": "y", "_rev": wildcard}),
]
assert s.items(type='bar', name="name", value="y") == [
("y", {"type": "bar", "name": "y"}),
("y", {"type": "bar", "name": "y", "_key": "y", "_rev": wildcard}),
]

def test_bad_data(self):
s["x"] = 1
assert s["x"] == 1
assert "x" in s

class TestSeq:
def test_seq(self):
seq.get_value("foo") == 0
33 changes: 18 additions & 15 deletions infogami/infobase/tests/test_store.py
@@ -12,43 +12,46 @@ def teardown_module(mod):
utils.teardown_db(mod)
mod.store = None

class DBTest(unittest.TestCase):
def setUp(self):
class DBTest:
def setup_method(self, method):
self.tx = db.transaction()
db.insert("thing", key='/type/object')

def tearDown(self):
def teardown_method(self, method):
self.tx.rollback()

class TestStore(DBTest):
def test_insert(self):
def test_insert(self, wildcard):
for i in range(10):
d = {"name": str(i), "value": i}
store.put(str(i), d)

for i in range(10):
d = {"name": str(i), "value": i}
d = {"name": str(i), "value": i, "_key": str(i), "_rev": wildcard}
assert store.get(str(i)) == d

def test_update(self):
self.test_insert()
self.test_insert()
def test_update(self, wildcard):
store.put("foo", {"name": "foo"})
assert store.get("foo") == dict(name="foo", _key="foo", _rev=wildcard)

store.put("foo", {"name": "bar"})
assert store.get("foo") == dict(name="bar", _key="foo", _rev=wildcard)

def test_notfound(self):
assert store.get("xxx") is None
assert store.get_json("xxx") is None
assert store.get_row("xxx") is None

def test_delete(self):
def test_delete(self, wildcard):
d = {"name": "foo"}
store.put("foo", d)
assert store.get("foo") == d
assert store.get("foo") == dict(d, _key="foo", _rev=wildcard)

store.delete("foo")
assert store.get("foo") is None

store.put("foo", {"name": "bar"})
assert store.get("foo") == {"name": "bar"}
assert store.get("foo") == {"name": "bar", "_key": "foo", "_rev": wildcard}

def test_query(self):
store.put("one", {"type": "digit", "name": "one", "value": 1})
@@ -67,18 +70,18 @@ def test_query(self):
# query for all
assert store.query(None, None, None) == [{"key": "b"}, {"key": "a"}, {"key": "two"}, {"key": "one"}]

def test_query_include_docs(self):
def test_query_include_docs(self, wildcard):
assert store.query(None, None, None, include_docs=True) == []

store.put("one", {"type": "digit", "name": "one", "value": 1})
store.put("two", {"type": "digit", "name": "two", "value": 2})

assert store.query("digit", "name", "one", include_docs=True) == [
{'key': "one", "doc": {"type": "digit", "name": "one", "value": 1}}
{'key': "one", "doc": {"type": "digit", "name": "one", "value": 1, "_key": "one", "_rev": wildcard}}
]
assert store.query(None, None, None, include_docs=True) == [
{'key': "two", "doc": {"type": "digit", "name": "two", "value": 2}},
{'key': "one", "doc": {"type": "digit", "name": "one", "value": 1}},
{'key': "two", "doc": {"type": "digit", "name": "two", "value": 2, "_key": "two", "_rev": wildcard}},
{'key': "one", "doc": {"type": "digit", "name": "one", "value": 1, "_key": "one", "_rev": wildcard}},
]

def test_indexer(self):
