From 119052e07414732396a69d527431e17f21a68069 Mon Sep 17 00:00:00 2001 From: Fabian Sinz Date: Sun, 10 May 2015 08:24:09 -0500 Subject: [PATCH 1/2] changed __iter__ and made settings singleton --- datajoint/relation.py | 3 +- datajoint/relational_operand.py | 33 ++++---- datajoint/settings.py | 8 +- tests/{test_base.py => test_free_relation.py} | 20 +---- tests/{test_table.py => test_relation.py} | 76 ++++++++++++++++++- ...lational.py => test_relational_operand.py} | 0 tests/test_settings.py | 12 ++- 7 files changed, 117 insertions(+), 35 deletions(-) rename tests/{test_base.py => test_free_relation.py} (97%) rename tests/{test_table.py => test_relation.py} (71%) rename tests/{test_relational.py => test_relational_operand.py} (100%) diff --git a/datajoint/relation.py b/datajoint/relation.py index 50a3b9661..d4b166d4c 100644 --- a/datajoint/relation.py +++ b/datajoint/relation.py @@ -33,7 +33,8 @@ class Subjects(dj.Relation): def definition(self): """ :return: string containing the table declaration using the DataJoint Data Definition Language. - The DataJoint DDL is described at: TODO + + The DataJoint DDL is described at: http://datajoint.github.com """ pass diff --git a/datajoint/relational_operand.py b/datajoint/relational_operand.py index e5ea820ab..1d2784022 100644 --- a/datajoint/relational_operand.py +++ b/datajoint/relational_operand.py @@ -135,7 +135,7 @@ def count(self): return cur.fetchone()[0] def __call__(self, *args, **kwargs): - return self(*args, **kwargs) + return self.fetch(*args, **kwargs) def fetch(self, offset=0, limit=None, order_by=None, descending=False): """ @@ -146,12 +146,13 @@ def fetch(self, offset=0, limit=None, order_by=None, descending=False): :param descending: the list of attributes to order the results :return: the contents of the relation in the form of a structured numpy.array """ - cur = self.cursor(offset, limit, order_by, descending) - ret = np.array(list(cur), dtype=self.heading.as_dtype) - for f in self.heading.blobs: - for i in range(len(ret)): - ret[i][f] = unpack(ret[i][f]) - return ret + # cur = self.cursor(offset, limit, order_by, descending) + # ret = np.array(list(cur), dtype=self.heading.as_dtype) + # for f in self.heading.blobs: + # for i in range(len(ret)): + # ret[i][f] = unpack(ret[i][f]) + # return ret + return np.array(list(self.__iter__(offset, limit, order_by, descending)), dtype=self.heading.as_dtype) def cursor(self, offset=0, limit=None, order_by=None, descending=False): """ @@ -190,17 +191,21 @@ def __repr__(self): repr_string += '%d tuples\n' % self.count return repr_string - def __iter__(self): + def __iter__(self, offset=0, limit=None, order_by=None, descending=False): """ - iterator yields primary key tuples - Example: - for key in relation: - (schema.Relation & key).fetch('field') + Iterator that yields individual tuples of the current table (as tuples). 
+
+
+        :param offset: parameter passed to the :func:`cursor`
+        :param limit: parameter passed to the :func:`cursor`
+        :param order_by: parameter passed to the :func:`cursor`
+        :param descending: parameter passed to the :func:`cursor`
         """
-        cur, h = self.project().cursor()  # project
+        cur = self.cursor(offset, limit, order_by, descending)
+        do_unpack = tuple(h in self.heading.blobs for h in self.heading.names)
         q = cur.fetchone()
         while q:
-            yield np.array([q, ], dtype=h.asdtype)
+            yield tuple(unpack(field) if up else field for up, field in zip(do_unpack, q))
             q = cur.fetchone()
 
     @property
diff --git a/datajoint/settings.py b/datajoint/settings.py
index 97316c89a..bf31ee710 100644
--- a/datajoint/settings.py
+++ b/datajoint/settings.py
@@ -37,7 +37,12 @@
 })
 
 
-class Config(collections.MutableMapping):
+class Borg:
+    _shared_state = {}
+    def __init__(self):
+        self.__dict__ = self._shared_state
+
+class Config(Borg, collections.MutableMapping):
     """
     Stores datajoint settings. Behaves like a dictionary, but applies validator functions
     when certain keys are set.
@@ -47,6 +52,7 @@ class Config(collections.MutableMapping):
     """
 
     def __init__(self, *args, **kwargs):
+        Borg.__init__(self)
         self._conf = dict(default)
         self.update(dict(*args, **kwargs))  # use the free update to set keys
 
diff --git a/tests/test_base.py b/tests/test_free_relation.py
similarity index 97%
rename from tests/test_base.py
rename to tests/test_free_relation.py
index c57076dc4..2f59ebf7a 100644
--- a/tests/test_base.py
+++ b/tests/test_free_relation.py
@@ -21,7 +21,7 @@ def setup():
     pass
 
 
-class TestBaseInstantiations(object):
+class TestRelationInstantiations(object):
     """
     Test cases for instantiating Relation objects
     """
@@ -89,7 +89,7 @@ def test_packagelevel_binding(self):
         s = schema2.test1.Subjects()
 
 
-class TestBaseDeclaration(object):
+class TestRelationDeclaration(object):
     """
     Test declaration (creation of table) from
     definition in Relation under various circumstances
@@ -147,7 +147,7 @@ def test_reference_to_unknown_module_in_definition_should_fail(self):
         s.declare()
 
 
-class TestBaseWithExistingTables(object):
+class TestRelationWithExistingTables(object):
     """
     Test base derivatives behaviors when some of the tables
     already exists in the database
@@ -205,17 +205,3 @@ def test_instantiation_of_base_derivative_without_definition_should_fail():
 
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tests/test_table.py b/tests/test_relation.py
similarity index 71%
rename from tests/test_table.py
rename to tests/test_relation.py
index 8d6c4743d..328f63767 100644
--- a/tests/test_table.py
+++ b/tests/test_relation.py
@@ -1,10 +1,14 @@
+import random
+import string
+
 __author__ = 'fabee'
 
 from .schemata.schema1 import test1, test4
 
 from . import BASE_CONN, CONN_INFO, PREFIX, cleanup
 from datajoint.connection import Connection
-from nose.tools import assert_raises, assert_equal, assert_regexp_matches, assert_false, assert_true, assert_list_equal
+from nose.tools import assert_raises, assert_equal, assert_regexp_matches, assert_false, assert_true, assert_list_equal,\
+    assert_tuple_equal
 from datajoint import DataJointError
 import numpy as np
 from numpy.testing import assert_array_equal
@@ -33,6 +37,9 @@ def setup(self):
         test1 - has conn and bounded
         """
         cleanup()  # drop all databases with PREFIX
+        test1.__dict__.pop('conn', None)
+        test4.__dict__.pop('conn', None)  # make sure conn is not defined at schema level
+
         self.conn = Connection(**CONN_INFO)
         test1.conn = self.conn
         test4.conn = self.conn
@@ -172,3 +179,70 @@ def test_reference_to_existing_table(self):
         assert_true('animal_id' in table2.primary_key)
 
 
+def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
+    return ''.join(random.choice(chars) for _ in range(size))
+
+class TestIterator(object):
+    def __init__(self):
+        self.relvar = None
+        self.setup()
+
+    """
+    Test cases for Iterators in Relations objects
+    """
+
+    def setup(self):
+        """
+        Create a connection object and prepare test modules
+        as follows:
+        test1 - has conn and bounded
+        """
+        cleanup()  # drop all databases with PREFIX
+        test4.__dict__.pop('conn', None)  # make sure conn is not defined at schema level
+
+        self.conn = Connection(**CONN_INFO)
+        test4.conn = self.conn
+        self.conn.bind(test4.__name__, PREFIX + '_test4')
+        self.relvar_blob = test4.Matrix()
+
+    def teardown(self):
+        cleanup()
+
+
+    def test_blob_iteration(self):
+        "Tests the basic call of the iterator"
+
+        tuples = []
+        for i in range(10):
+
+            c = id_generator()
+
+            t = (i, np.random.randn(4,4,4), c)
+            self.relvar_blob.insert(t)
+            tuples.append(t)
+
+        for t, t2 in zip(tuples, self.relvar_blob):
+
+            assert_equal(t[0], t2[0], 'inserted and retrieved tuples do not match')
+            assert_equal(t[2], t2[2], 'inserted and retrieved tuples do not match')
+            assert_true(np.all(t[1] == t2[1]), 'inserted and retrieved tuples do not match')
+
+    def test_fetch(self):
+        tuples = []
+        for i in range(10):
+
+            c = id_generator()
+
+            t = (i, np.random.randn(4,4,4), c)
+            self.relvar_blob.insert(t)
+            tuples.append(t)
+
+        tuples2 = self.relvar_blob.fetch()
+        print(type(tuples2))
+        assert_true(isinstance(tuples2, np.ndarray), "Return value of fetch does not have proper type.")
+        assert_true(isinstance(tuples2[0], np.void), "Return value of fetch does not have proper type.")
+        for t, t2 in zip(tuples, tuples2):
+
+            assert_equal(t[0], t2['matrix_id'], 'inserted and retrieved tuples do not match')
+            assert_equal(t[2], t2['comment'], 'inserted and retrieved tuples do not match')
+            assert_true(np.all(t[1] == t2['data']), 'inserted and retrieved tuples do not match')
diff --git a/tests/test_relational.py b/tests/test_relational_operand.py
similarity index 100%
rename from tests/test_relational.py
rename to tests/test_relational_operand.py
diff --git a/tests/test_settings.py b/tests/test_settings.py
index aff05b9c9..e081f8f2c 100644
--- a/tests/test_settings.py
+++ b/tests/test_settings.py
@@ -2,7 +2,7 @@
 
 __author__ = 'Fabian Sinz'
 
-from nose.tools import assert_true, assert_raises, assert_equal, raises
+from nose.tools import assert_true, assert_raises, assert_equal, raises, assert_dict_equal
 import datajoint as dj
 
 
@@ -13,6 +13,16 @@ def test_load_save():
     assert_true(conf == dj.config, 'Two config files do not match.')
     os.remove('tmp.json')
 
+def test_singleton():
+ dj.config.save('tmp.json') + conf = dj.Config() + conf.load('tmp.json') + conf['dummy.val'] = 2 + + assert_true(conf == dj.config, 'Config does not behave like a singleton.') + os.remove('tmp.json') + + @raises(ValueError) def test_nested_check(): dummy = {'dummy.testval': {'notallowed': 2}} From 03b23f0d19d7adf5ca6c0848db79a52d060e100e Mon Sep 17 00:00:00 2001 From: Fabian Sinz Date: Sun, 10 May 2015 09:42:53 -0500 Subject: [PATCH 2/2] Relation.__iter__ returns record arrays --- datajoint/relational_operand.py | 14 +++++--------- tests/test_relation.py | 1 + 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/datajoint/relational_operand.py b/datajoint/relational_operand.py index 1d2784022..facdc6aab 100644 --- a/datajoint/relational_operand.py +++ b/datajoint/relational_operand.py @@ -9,6 +9,7 @@ from datajoint import DataJointError from .blob import unpack import logging +import numpy.lib.recfunctions as rfn logger = logging.getLogger(__name__) @@ -146,13 +147,7 @@ def fetch(self, offset=0, limit=None, order_by=None, descending=False): :param descending: the list of attributes to order the results :return: the contents of the relation in the form of a structured numpy.array """ - # cur = self.cursor(offset, limit, order_by, descending) - # ret = np.array(list(cur), dtype=self.heading.as_dtype) - # for f in self.heading.blobs: - # for i in range(len(ret)): - # ret[i][f] = unpack(ret[i][f]) - # return ret - return np.array(list(self.__iter__(offset, limit, order_by, descending)), dtype=self.heading.as_dtype) + return np.atleast_1d(rfn.stack_arrays(list(self.__iter__(offset, limit, order_by, descending)), usemask=False)) def cursor(self, offset=0, limit=None, order_by=None, descending=False): """ @@ -193,7 +188,7 @@ def __repr__(self): def __iter__(self, offset=0, limit=None, order_by=None, descending=False): """ - Iterator that yields individual tuples of the current table (as tuples). + Iterator that yields individual tuples of the current table (as record arrays). :param offset: parameter passed to the :func:`cursor` @@ -205,7 +200,8 @@ def __iter__(self, offset=0, limit=None, order_by=None, descending=False): do_unpack = tuple(h in self.heading.blobs for h in self.heading.names) q = cur.fetchone() while q: - yield tuple(unpack(field) if up else field for up, field in zip(do_unpack, q)) + yield np.array([tuple(unpack(field) if up else field for up, field in zip(do_unpack, q))], + dtype=self.heading.as_dtype)[0] q = cur.fetchone() @property diff --git a/tests/test_relation.py b/tests/test_relation.py index 328f63767..8797ad573 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -246,3 +246,4 @@ def test_fetch(self): assert_equal(t[0], t2['matrix_id'], 'inserted and retrieved tuples do not match') assert_equal(t[2], t2['comment'], 'inserted and retrieved tuples do not match') assert_true(np.all(t[1] == t2['data']), 'inserted and retrieved tuples do not match') +
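
A note on the settings change in PATCH 1/2: the new Borg base class turns Config into a shared-state ("monostate") object. Every instance points its __dict__ at one class-level dictionary, so dj.config and any Config() constructed later read and write the same settings; that is what test_singleton in tests/test_settings.py exercises. Below is a minimal, self-contained sketch of the pattern, not the DataJoint code itself: the real Config also applies validator functions and starts from the module-level default settings, and the _conf guard here is an illustrative deviation from the patch, which re-creates the defaults on every construction. (Side note: collections.MutableMapping, as written in the patch, only resolves on Python < 3.10; later interpreters need collections.abc.MutableMapping.)

    from collections.abc import MutableMapping   # collections.MutableMapping only exists on Python < 3.10

    class Borg:
        """Shared-state base class: every instance points __dict__ at one class-level dict."""
        _shared_state = {}

        def __init__(self):
            self.__dict__ = self._shared_state

    class Config(Borg, MutableMapping):
        def __init__(self, *args, **kwargs):
            Borg.__init__(self)
            # guard the backing dict so a second Config() does not wipe it
            # (the patch instead re-initializes it from the module-level defaults)
            if '_conf' not in self.__dict__:
                self._conf = {}
            self._conf.update(dict(*args, **kwargs))

        def __getitem__(self, key):
            return self._conf[key]

        def __setitem__(self, key, value):
            self._conf[key] = value

        def __delitem__(self, key):
            del self._conf[key]

        def __iter__(self):
            return iter(self._conf)

        def __len__(self):
            return len(self._conf)

    a, b = Config(), Config()
    a['database.host'] = 'localhost'
    assert b['database.host'] == 'localhost'   # both instances see the same shared state

Because both instances share one __dict__, the equality check at the end is trivially true, which mirrors the assert_true(conf == dj.config, ...) assertion in test_singleton.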
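
PATCH 2/2 changes RelationalOperand.__iter__ to yield each row as a structured numpy record (an np.void scalar with the relation's dtype), unpacking blob columns field by field, and rebuilds fetch() by stacking those records with numpy.lib.recfunctions.stack_arrays. The sketch below reproduces the shape of that logic outside DataJoint: the row dtype, the toy unpack function, and the list of raw rows standing in for a database cursor are all invented for illustration, and the final array is assembled with a plain np.array rather than stack_arrays.

    import numpy as np

    # stand-ins for heading.as_dtype and heading.blobs; 'data' plays the role of a blob column
    row_dtype = np.dtype([('matrix_id', 'int64'), ('comment', 'U16'), ('data', object)])
    blob_fields = ('data',)

    def unpack(raw_bytes):
        # toy replacement for datajoint.blob.unpack: here a "blob" is just raw float64 bytes
        return np.frombuffer(raw_bytes, dtype='float64')

    def iter_rows(raw_rows):
        # yield each raw cursor row as a single np.void record, unpacking blob fields on the fly
        do_unpack = tuple(name in blob_fields for name in row_dtype.names)
        for raw in raw_rows:
            values = tuple(unpack(v) if up else v for up, v in zip(do_unpack, raw))
            yield np.array([values], dtype=row_dtype)[0]

    def fetch(raw_rows):
        # assemble the per-row records into one structured array
        return np.array([tuple(rec) for rec in iter_rows(raw_rows)], dtype=row_dtype)

    raw = [(1, 'first', np.arange(3, dtype='float64').tobytes()),
           (2, 'second', np.zeros(2).tobytes())]
    table = fetch(raw)
    print(table['matrix_id'], table[0]['comment'], table[1]['data'])

Yielding fully typed records is what lets test_fetch assert that fetch() returns an np.ndarray whose rows are np.void records addressable by field name (matrix_id, comment, data).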