3 changes: 2 additions & 1 deletion datajoint/relation.py
@@ -33,7 +33,8 @@ class Subjects(dj.Relation):
     def definition(self):
         """
         :return: string containing the table declaration using the DataJoint Data Definition Language.
-        The DataJoint DDL is described at: TODO
+
+        The DataJoint DDL is described at: http://datajoint.github.com
         """
         pass

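Reviewer note: for orientation, the `definition` documented here is what a `dj.Relation` subclass uses to declare its table. The sketch below only illustrates the general shape, assuming the usual `attribute : type  # comment` lines with a `---` divider; it is shown as a property for readability, the attribute names are invented, and the authoritative syntax is the DDL reference linked above.

```python
import datajoint as dj


class Subjects(dj.Relation):

    @property
    def definition(self):
        # Hypothetical declaration string, for illustration only;
        # consult the DataJoint DDL reference above for the real syntax.
        return """
        subject_id : int          # unique subject id
        ---
        real_id    : varchar(40)  # real-world identifier
        """
```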
29 changes: 15 additions & 14 deletions datajoint/relational_operand.py
@@ -9,6 +9,7 @@
 from datajoint import DataJointError
 from .blob import unpack
 import logging
+import numpy.lib.recfunctions as rfn

 logger = logging.getLogger(__name__)

@@ -135,7 +136,7 @@ def count(self):
         return cur.fetchone()[0]

     def __call__(self, *args, **kwargs):
-        return self(*args, **kwargs)
+        return self.fetch(*args, **kwargs)

     def fetch(self, offset=0, limit=None, order_by=None, descending=False):
         """
@@ -146,12 +147,7 @@ def fetch(self, offset=0, limit=None, order_by=None, descending=False):
         :param descending: the list of attributes to order the results
         :return: the contents of the relation in the form of a structured numpy.array
         """
-        cur = self.cursor(offset, limit, order_by, descending)
-        ret = np.array(list(cur), dtype=self.heading.as_dtype)
-        for f in self.heading.blobs:
-            for i in range(len(ret)):
-                ret[i][f] = unpack(ret[i][f])
-        return ret
+        return np.atleast_1d(rfn.stack_arrays(list(self.__iter__(offset, limit, order_by, descending)), usemask=False))

     def cursor(self, offset=0, limit=None, order_by=None, descending=False):
         """
@@ -190,17 +186,22 @@ def __repr__(self):
         repr_string += '%d tuples\n' % self.count
         return repr_string

-    def __iter__(self):
+    def __iter__(self, offset=0, limit=None, order_by=None, descending=False):
         """
-        iterator yields primary key tuples
-        Example:
-            for key in relation:
-                (schema.Relation & key).fetch('field')
+        Iterator that yields individual tuples of the current table (as record arrays).
+
+
+        :param offset: parameter passed to the :func:`cursor`
+        :param limit: parameter passed to the :func:`cursor`
+        :param order_by: parameter passed to the :func:`cursor`
+        :param descending: parameter passed to the :func:`cursor`
         """
-        cur, h = self.project().cursor()  # project
+        cur = self.cursor(offset, limit, order_by, descending)
+        do_unpack = tuple(h in self.heading.blobs for h in self.heading.names)
         q = cur.fetchone()
         while q:
-            yield np.array([q, ], dtype=h.asdtype)
+            yield np.array([tuple(unpack(field) if up else field for up, field in zip(do_unpack, q))],
+                           dtype=self.heading.as_dtype)[0]
             q = cur.fetchone()

     @property
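Reviewer note: the reworked `fetch` now delegates row retrieval to `__iter__` and merges the per-row records with `numpy.lib.recfunctions.stack_arrays`. Below is a minimal, self-contained sketch of just that stacking step, using a made-up dtype in place of `heading.as_dtype`; it also shows why the result is wrapped in `np.atleast_1d` (for a single row, `stack_arrays` hands back the lone record rather than an array).

```python
import numpy as np
import numpy.lib.recfunctions as rfn

# Made-up dtype standing in for heading.as_dtype.
dtype = [('subject_id', 'i4'), ('comment', 'U10')]

# Each element mimics what __iter__ yields: a single record (np.void)
# pulled out of a one-row structured array.
rows = [np.array([(1, 'first')], dtype=dtype)[0],
        np.array([(2, 'second')], dtype=dtype)[0]]

# usemask=False gives a plain ndarray instead of a masked array.
stacked = np.atleast_1d(rfn.stack_arrays(rows, usemask=False))
print(stacked['comment'])        # ['first' 'second']

# With a single row, stack_arrays returns the record itself, so
# np.atleast_1d keeps fetch's return type a 1-d structured array.
single = np.atleast_1d(rfn.stack_arrays(rows[:1], usemask=False))
print(single.shape)              # (1,)
```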
8 changes: 7 additions & 1 deletion datajoint/settings.py
@@ -37,7 +37,12 @@
 })


-class Config(collections.MutableMapping):
+class Borg:
+    _shared_state = {}
+    def __init__(self):
+        self.__dict__ = self._shared_state
+
+class Config(Borg, collections.MutableMapping):
     """
     Stores datajoint settings. Behaves like a dictionary, but applies validator functions
     when certain keys are set.
@@ -47,6 +52,7 @@ class Config(collections.MutableMapping):
"""

def __init__(self, *args, **kwargs):
Borg.__init__(self)
self._conf = dict(default)
self.update(dict(*args, **kwargs)) # use the free update to set keys

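Reviewer note: the new `Borg` base class is the shared-state ("Borg"/monostate) pattern: every instance points its `__dict__` at the same class-level dictionary, so all `Config` objects see the same settings, which is what `test_singleton` in `tests/test_settings.py` exercises. A minimal standalone sketch of the mechanism, with illustrative names that are not part of datajoint:

```python
class Borg:
    _shared_state = {}

    def __init__(self):
        # All instances share one attribute dict, so writes through any
        # instance are visible through every other instance.
        self.__dict__ = self._shared_state


class SharedConfig(Borg):
    def __init__(self, **settings):
        Borg.__init__(self)
        # '_conf' lives in the shared dict and accumulates settings.
        self.__dict__.setdefault('_conf', {}).update(settings)


a = SharedConfig(database_host='localhost')
b = SharedConfig()
assert a is not b                                # distinct objects
assert b._conf['database_host'] == 'localhost'   # shared state
```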
20 changes: 3 additions & 17 deletions tests/test_base.py → tests/test_free_relation.py
@@ -21,7 +21,7 @@ def setup():
     pass


-class TestBaseInstantiations(object):
+class TestRelationInstantiations(object):
     """
     Test cases for instantiating Relation objects
     """
@@ -89,7 +89,7 @@ def test_packagelevel_binding(self):
         s = schema2.test1.Subjects()


-class TestBaseDeclaration(object):
+class TestRelationDeclaration(object):
     """
     Test declaration (creation of table) from
     definition in Relation under various circumstances
@@ -147,7 +147,7 @@ def test_reference_to_unknown_module_in_definition_should_fail(self):
             s.declare()


-class TestBaseWithExistingTables(object):
+class TestRelationWithExistingTables(object):
     """
     Test base derivatives behaviors when some of the tables
     already exists in the database
@@ -205,17 +205,3 @@ def test_instantiation_of_base_derivative_without_definition_should_fail():



-
-
-
-
-
-
-
-
-
-
-
-
-
-
77 changes: 76 additions & 1 deletion tests/test_table.py → tests/test_relation.py
@@ -1,10 +1,14 @@
+import random
+import string
+
 __author__ = 'fabee'

 from .schemata.schema1 import test1, test4

 from . import BASE_CONN, CONN_INFO, PREFIX, cleanup
 from datajoint.connection import Connection
-from nose.tools import assert_raises, assert_equal, assert_regexp_matches, assert_false, assert_true, assert_list_equal
+from nose.tools import assert_raises, assert_equal, assert_regexp_matches, assert_false, assert_true, assert_list_equal,\
+    assert_tuple_equal
 from datajoint import DataJointError
 import numpy as np
 from numpy.testing import assert_array_equal
@@ -33,6 +37,9 @@ def setup(self):
         test1 - has conn and bounded
         """
         cleanup()  # drop all databases with PREFIX
+        test1.__dict__.pop('conn', None)
+        test4.__dict__.pop('conn', None)  # make sure conn is not defined at schema level
+
         self.conn = Connection(**CONN_INFO)
         test1.conn = self.conn
         test4.conn = self.conn
@@ -172,3 +179,71 @@ def test_reference_to_existing_table(self):
         assert_true('animal_id' in table2.primary_key)


+def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
+    return ''.join(random.choice(chars) for _ in range(size))
+
+class TestIterator(object):
+    def __init__(self):
+        self.relvar = None
+        self.setup()
+
+    """
+    Test cases for Iterators in Relations objects
+    """
+
+    def setup(self):
+        """
+        Create a connection object and prepare test modules
+        as follows:
+        test1 - has conn and bounded
+        """
+        cleanup()  # drop all databases with PREFIX
+        test4.__dict__.pop('conn', None)  # make sure conn is not defined at schema level
+
+        self.conn = Connection(**CONN_INFO)
+        test4.conn = self.conn
+        self.conn.bind(test4.__name__, PREFIX + '_test4')
+        self.relvar_blob = test4.Matrix()
+
+    def teardown(self):
+        cleanup()
+
+
+    def test_blob_iteration(self):
+        "Tests the basic call of the iterator"
+
+        tuples = []
+        for i in range(10):
+
+            c = id_generator()
+
+            t = (i, np.random.randn(4,4,4), c)
+            self.relvar_blob.insert(t)
+            tuples.append(t)
+
+        for t, t2 in zip(tuples, self.relvar_blob):
+
+            assert_equal(t[0], t2[0], 'inserted and retrieved tuples do not match')
+            assert_equal(t[2], t2[2], 'inserted and retrieved tuples do not match')
+            assert_true(np.all(t[1] == t2[1]), 'inserted and retrieved tuples do not match')
+
+    def test_fetch(self):
+        tuples = []
+        for i in range(10):
+
+            c = id_generator()
+
+            t = (i, np.random.randn(4,4,4), c)
+            self.relvar_blob.insert(t)
+            tuples.append(t)
+
+        tuples2 = self.relvar_blob.fetch()
+        print(type(tuples2))
+        assert_true(isinstance(tuples2, np.ndarray), "Return value of fetch does not have proper type.")
+        assert_true(isinstance(tuples2[0], np.void), "Return value of fetch does not have proper type.")
+        for t, t2 in zip(tuples, tuples2):
+
+            assert_equal(t[0], t2['matrix_id'], 'inserted and retrieved tuples do not match')
+            assert_equal(t[2], t2['comment'], 'inserted and retrieved tuples do not match')
+            assert_true(np.all(t[1] == t2['data']), 'inserted and retrieved tuples do not match')
+
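Reviewer note: `test_fetch` above asserts that `fetch()` returns a structured numpy array whose individual rows are `np.void` records addressable by field name. A small hand-built illustration of that access pattern (the field names mirror the test's `Matrix` relation, but the array here is constructed directly rather than fetched):

```python
import numpy as np

# Stand-in for the kind of structured array fetch() is expected to return.
result = np.array(
    [(0, np.zeros((4, 4, 4)), 'ABC123'),
     (1, np.ones((4, 4, 4)), 'XYZ789')],
    dtype=[('matrix_id', 'i4'), ('data', 'O'), ('comment', 'U6')])

row = result[0]
print(isinstance(result, np.ndarray))        # True
print(isinstance(row, np.void))              # True: one record of the array
print(row['comment'], result['matrix_id'])   # field access by name
```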
File renamed without changes.
12 changes: 11 additions & 1 deletion tests/test_settings.py
@@ -2,7 +2,7 @@

 __author__ = 'Fabian Sinz'

-from nose.tools import assert_true, assert_raises, assert_equal, raises
+from nose.tools import assert_true, assert_raises, assert_equal, raises, assert_dict_equal
 import datajoint as dj


@@ -13,6 +13,16 @@ def test_load_save():
     assert_true(conf == dj.config, 'Two config files do not match.')
     os.remove('tmp.json')

+def test_singleton():
+    dj.config.save('tmp.json')
+    conf = dj.Config()
+    conf.load('tmp.json')
+    conf['dummy.val'] = 2
+
+    assert_true(conf == dj.config, 'Config does not behave like a singleton.')
+    os.remove('tmp.json')
+
+
 @raises(ValueError)
 def test_nested_check():
     dummy = {'dummy.testval': {'notallowed': 2}}