From e70af5da7d6f430a16db8fa001939ce8e436d358 Mon Sep 17 00:00:00 2001
From: Achilleas Koutsou
Date: Wed, 16 Jan 2019 19:44:28 +0100
Subject: [PATCH 1/4] [compile] Search both /usr and /usr/local by default

When looking for NIX to compile the cross-compatibility tests, search in
both /usr and /usr/local, as well as in any directories supplied by the
user through the env vars (NIX_LIBDIR, NIX_INCDIR, BOOST_LIBDIR,
BOOST_INCDIR).
---
 nixio/test/xcompat/compile.py | 42 +++++++++++++++++++++++++----------
 1 file changed, 30 insertions(+), 12 deletions(-)

diff --git a/nixio/test/xcompat/compile.py b/nixio/test/xcompat/compile.py
index e6225ffa..39dbd3f2 100644
--- a/nixio/test/xcompat/compile.py
+++ b/nixio/test/xcompat/compile.py
@@ -28,10 +28,14 @@ def cc(filenames, dest,
     compiler = ccompiler.new_compiler()
     distutils.sysconfig.customize_compiler(compiler)
 
-    compiler.set_library_dirs(library_dirs)
-    compiler.set_include_dirs(include_dirs)
-    compiler.set_libraries(libraries)
-    compiler.set_runtime_library_dirs(runtime_lib_dirs)
+    if library_dirs:
+        compiler.set_library_dirs(library_dirs)
+    if include_dirs:
+        compiler.set_include_dirs(include_dirs)
+    if libraries:
+        compiler.set_libraries(libraries)
+    if runtime_lib_dirs:
+        compiler.set_runtime_library_dirs(runtime_lib_dirs)
 
     try:
         objnames = compiler.compile(filenames, output_dir=dest,
@@ -51,14 +55,28 @@ def maketests(dest):
     scriptloc, _ = os.path.split(os.path.abspath(__file__))
     os.chdir(scriptloc)
     filenames = glob("*.cpp")
-    nix_inc_dir = os.getenv('NIX_INCDIR', '/usr/local/include/nixio-1.0')
-    nix_lib_dir = os.getenv('NIX_LIBDIR', '/usr/local/lib')
-
-    boost_inc_dir = os.getenv('BOOST_INCDIR', '/usr/local/include')
-    boost_lib_dir = os.getenv('BOOST_LIBDIR', '/usr/local/lib')
-    library_dirs = [boost_lib_dir, nix_lib_dir]
-    include_dirs = [boost_inc_dir, nix_inc_dir, 'src']
-    runtime_dirs = ["/usr/local/lib"]
+
+    # look for libs and headers in both /usr/ and /usr/local/
+    library_dirs = ["/usr/lib", "/usr/local/lib"]
+    libenv = os.getenv("NIX_LIBDIR", None)
+    if libenv:
+        library_dirs.append(libenv)
+
+    include_dirs = ["/usr/include/", "/usr/local/include",
+                    "/usr/include/nixio-1.0", "/usr/local/include/nixio-1.0",
+                    "src"]
+    incenv = os.getenv("NIX_INCDIR", None)
+    if incenv:
+        include_dirs.append(incenv)
+
+    boost_libenv = os.getenv("BOOST_LIBDIR", None)
+    if boost_libenv:
+        library_dirs.append(boost_libenv)
+    boost_incenv = os.getenv("BOOST_INCDIR", None)
+    if boost_incenv:
+        include_dirs.append(boost_incenv)
+
+    runtime_dirs = ["/usr/lib", "/usr/local/lib"]
     llp = os.getenv("LD_LIBRARY_PATH", None)
     if llp is not None:
         runtime_dirs.append(llp)

From c43c85798fc5e139c75d604070fa4b2d0dc2d9a9 Mon Sep 17 00:00:00 2001
From: Achilleas Koutsou
Date: Wed, 16 Jan 2019 20:55:08 +0100
Subject: [PATCH 2/4] Codestyle improvements and cleanup

---
 nixio/__init__.py                    |  6 ++--
 nixio/block.py                       | 31 +++++++++++++--------
 nixio/data_frame.py                  | 41 ++++++++++++++++++----------
 nixio/test/test_data_frame.py        | 39 ++++++++++++++------------
 nixio/test/test_nix_compatibility.py |  6 ++--
 nixio/validate.py                    |  5 +++-
 6 files changed, 79 insertions(+), 49 deletions(-)

diff --git a/nixio/__init__.py b/nixio/__init__.py
index be3b6ead..305d6c4e 100644
--- a/nixio/__init__.py
+++ b/nixio/__init__.py
@@ -32,9 +32,9 @@
 # version
 from .info import VERSION as __version__
 
-__all__ = ("File", "Block", "Group", "DataArray", "DataFrame", "Tag", "MultiTag", "Source",
-           "Section", "S", "Feature", "Property", "OdmlType",
-           "SampledDimension", "RangeDimension", "SetDimension",
+__all__ = ("File", "Block", "Group", "DataArray", "DataFrame", "Tag",
+           "MultiTag", "Source", "Section", "S", "Feature", "Property",
+           "OdmlType", "SampledDimension", "RangeDimension", "SetDimension",
            "FileMode", "DataSliceMode", "DataType", "DimensionType",
            "LinkType", "Compression")
 __author__ = ('Christian Kellner, Adrian Stoewer, Andrey Sobolev, Jan Grewe, '
diff --git a/nixio/block.py b/nixio/block.py
index 0969edd4..260ec0dd 100644
--- a/nixio/block.py
+++ b/nixio/block.py
@@ -14,7 +14,10 @@
 import numpy as np
 from inspect import isclass
 from six import string_types
-from collections import OrderedDict  # using it for python2.7
+try:
+    from collections.abc import OrderedDict
+except ImportError:
+    from collections import OrderedDict
 import sys
 
 from .util import find as finders
@@ -194,12 +197,13 @@ def create_data_frame(self, name, type_, col_dict=None, col_names=None,
                           col_dtypes=None, data=None,
                           compression=Compression.No):
 
-        if isinstance(col_dict, dict) and not \
-                isinstance(col_dict, OrderedDict) and sys.version_info[0] < 3:
+        if (isinstance(col_dict, dict)
+                and not isinstance(col_dict, OrderedDict)
+                and sys.version_info[0] < 3):
             raise TypeError("Python 2 users should use name_list "
-                            "or OrderedDict created with LIST and TUPLES"
-                            " to create DataFrames as the order "
-                            "of the columns cannot be maintained in Py2")
+                            "or OrderedDict created with LIST and TUPLES "
+                            "to create DataFrames as the order "
+                            "of the columns cannot be maintained in Py2")
 
         if data is not None:
             shape = len(data)
@@ -210,14 +214,18 @@ def create_data_frame(self, name, type_, col_dict=None, col_names=None,
         if col_dict is None:
             if col_names is not None:
                 if col_dtypes is not None:
-                    col_dict = OrderedDict((str(nam), dt)
-                                           for nam, dt in zip(col_names, col_dtypes))
+                    col_dict = OrderedDict(
+                        (str(nam), dt)
+                        for nam, dt in zip(col_names, col_dtypes)
+                    )
                 elif col_dtypes is None and data is not None:
                     col_dtypes = []
                     for x in data[0]:
                         col_dtypes.append(type(x))
-                    col_dict = OrderedDict((str(nam), dt)
-                                           for nam, dt in zip(col_names, col_dtypes))
+                    col_dict = OrderedDict(
+                        (str(nam), dt)
+                        for nam, dt in zip(col_names, col_dtypes)
+                    )
                 else:
                     # col_dtypes is None and data is None
                     raise (ValueError, "The data type of each column have to be specified")
@@ -233,7 +241,8 @@ def create_data_frame(self, name, type_, col_dict=None, col_names=None,
                 col_dict = OrderedDict(zip(cn, raw_dt_list))
 
             else:
-                # data is None or type(data[0]) != np.void /data_type doesnt matter
+                # data is None or type(data[0]) != np.void
+                # data_type doesnt matter
                 raise (ValueError,
                        "No information about column names is provided!")
 
diff --git a/nixio/data_frame.py b/nixio/data_frame.py
index 74692063..1a031d77 100644
--- a/nixio/data_frame.py
+++ b/nixio/data_frame.py
@@ -2,9 +2,10 @@
 from __future__ import (absolute_import, division, print_function)
 
 try:
-    from collections.abc import Iterable, OrderedDict
+    from collections.abc import Iterable
 except ImportError:
-    from collections import Iterable, OrderedDict
+    from collections import Iterable
+from collections import OrderedDict
 from inspect import isclass
 import numpy as np
 from .exceptions import OutOfBounds
@@ -25,9 +26,10 @@ def __init__(self, nixparent, h5group):
         self._rows = None
 
     @classmethod
-    def _create_new(cls, nixparent, h5parent, name, type_, shape, col_dtype, compression):
-
-        newentity = super(DataFrame, cls)._create_new(nixparent, h5parent, name, type_)
+    def _create_new(cls, nixparent, h5parent,
+                    name, type_, shape, col_dtype, compression):
+        newentity = super(DataFrame, cls)._create_new(nixparent, h5parent,
+                                                      name, type_)
         newentity._h5group.create_dataset("data", (shape, ), col_dtype)
         return newentity
 
@@ -38,7 +40,8 @@ def append_column(self, column, name, datatype=None):
             raise ValueError("Too much entries for column in this dataframe")
         if datatype is None:
             datatype = DataType.get_dtype(column[0])
-        if isclass(datatype) and any(issubclass(datatype, st) for st in string_types):
+        if isclass(datatype) and any(issubclass(datatype, st)
+                                     for st in string_types):
             datatype = util.vlen_str_dtype
         dt_arr = [(n, dty) for n, dty in zip(self.column_names, self.dtype)]
         dt_arr.append((name, datatype))
@@ -56,7 +59,8 @@ def append_column(self, column, name, datatype=None):
             self._h5group.create_dataset("data", (self.shape[0],), dt)
         self.write_direct(farr)
 
-    def append_rows(self, data):  # In Python2, the data supplied must be iterable (not np arrays)
+    def append_rows(self, data):
+        # In Python2, the data supplied must be iterable (not np arrays)
         li_data = []
         for d in data:
             d = tuple(d)
@@ -77,7 +81,7 @@ def write_column(self, column, index=None, name=None):
             rows[name] = cell
             self.write_rows(rows=[rows], index=[i])
 
-# TODO: for read column add a Mode that break down the tuples
+    # TODO: for read column add a Mode that break down the tuples
     def read_columns(self, index=None, name=None, sl=None):
         if index is None and name is None:
             raise ValueError("Either index or name must not be None")
@@ -96,10 +100,15 @@ def write_rows(self, rows, index):
         if len(rows) != len(index):
-            raise IndexError("Length of row changed and index specified do not match")
+            raise IndexError(
+                "Number of rows ({}) does not match "
+                "length of indexes ({})".format(len(rows), len(index))
+            )
         x, = self.shape
         if max(index) > (x - 1):
-            raise OutOfBounds("Row index should not exceed the existing no. of rows")
+            raise OutOfBounds(
+                "Row index exceeds the existing number of rows"
+            )
         if len(index) == 1:
             rows = tuple(rows[0])
             self._write_data(rows, sl=index)
@@ -118,7 +127,8 @@ def read_rows(self, index):
     def write_cell(self, cell, position=None, col_name=None, row_idx=None):
         if position is not None:
             if len(position) != 2:
-                raise ValueError('not a position')
+                raise ValueError("position is invalid: "
+                                 "need row and column index")
             x, y = position
             targeted_row = self.read_rows(x)
             targeted_row[y] = cell
@@ -170,7 +180,7 @@ def write_to_csv(self, filename, mode='w'):
         with open(filename, mode, newline='') as csvfile:
             dw = csv.DictWriter(csvfile, fieldnames=self.column_names)
             dw.writeheader()
-            di = dict()  # this dict make the iter below quicker compared to using self in L172
+            di = dict()
             for n in self.column_names:
                 n = str(n)
                 di[n] = list(self[n])
@@ -199,15 +209,16 @@ def units(self, u):
     @property
     def columns(self):
         if self.units:
-            cols = [(n, dt, u) for n, dt, u in zip(self.column_names, self.dtype, self.units)]
+            cols = [(n, dt, u) for n, dt, u in
+                    zip(self.column_names, self.dtype, self.units)]
         else:
-            cols = [(n, dt, None) for n, dt in zip(self.column_names, self.dtype)]
+            cols = [(n, dt, None) for n, dt in
+                    zip(self.column_names, self.dtype)]
         return cols
 
     @property
     def column_names(self):
         dt = self._h5group.group["data"].dtype
-        # cn = dt.fields.keys()
         return dt.names
 
     @property
diff --git a/nixio/test/test_data_frame.py b/nixio/test/test_data_frame.py
index c062648d..d7008cfc 100644
--- a/nixio/test/test_data_frame.py
+++ b/nixio/test/test_data_frame.py
@@ -4,7 +4,10 @@
 import os
 import numpy as np
 from six import string_types
-from collections import OrderedDict
+try:
+    from collections.abc import OrderedDict
+except ImportError:
+    from collections import OrderedDict
 import sys
 
 
@@ -17,7 +20,7 @@ def setUp(self):
         self.file = nix.File.open(self.testfilename, nix.FileMode.Overwrite)
         self.block = self.file.create_block("test block", "recordingsession")
         di = OrderedDict([('name', np.int64), ('id', str), ('time', float),
-                         ('sig1', np.float64), ('sig2', np.int32)])
+                          ('sig1', np.float64), ('sig2', np.int32)])
         arr = [(1, "a", 20.18, 5.0, 100), (2, 'b', 20.09, 5.5, 101),
                (2, 'c', 20.05, 5.1, 100), (1, "d", 20.15, 5.3, 150),
                (2, 'e', 20.23, 5.7, 200), (2, 'f', 20.07, 5.2, 300),
@@ -30,7 +33,8 @@ def setUp(self):
         self.df2 = self.block.create_data_frame("other df", "signal2",
                                                 data=arr, col_dict=di)
         self.df3 = self.block.create_data_frame("reference df", "signal3",
-                                                data=other_arr, col_dict=other_di)
+                                                data=other_arr,
+                                                col_dict=other_di)
         self.dtype = self.df1._h5group.group["data"].dtype
 
     def tearDown(self):
@@ -41,8 +45,8 @@ def create_with_list(self):
         arr = np.arange(999).reshape((333, 3))
         namelist = np.array(['name', 'id', 'time'])
         dtlist = np.array([int, str, float])
-        new_df = self.blk.create_data_frame('test1', 'for_test',
-                                            col_names=namelist, col_dtypes=dtlist, data=arr)
+        self.blk.create_data_frame('test1', 'for_test', col_names=namelist,
+                                   col_dtypes=dtlist, data=arr)
 
     def test_data_frame_eq(self):
         assert self.df1 == self.df1
@@ -57,7 +61,8 @@ def test_create_with_list(self):
         namelist = np.array(['name', 'id', 'time', 'sig1', 'sig2'])
         dtlist = np.array([np.int64, str, float, np.float64, np.int32])
         df_li = self.block.create_data_frame("test_list", "make_of_list",
-                                             data=arr, col_names=namelist, col_dtypes=dtlist)
+                                             data=arr, col_names=namelist,
+                                             col_dtypes=dtlist)
         assert df_li.column_names == self.df1.column_names
         assert df_li.dtype == self.df1.dtype
         for i in df_li[:]:
@@ -142,19 +147,19 @@ def test_write_cell(self):
     def test_append_column(self):
         y = np.arange(start=16000, stop=16010, step=1)
         self.df1.append_column(y, name='trial_col', datatype=int)
-        assert self.df1.column_names == \
-            ('name', 'id', 'time', 'sig1', 'sig2', 'trial_col')
+        assert self.df1.column_names == ('name', 'id', 'time',
+                                         'sig1', 'sig2', 'trial_col')
         assert len(self.df1.dtype) == 6
         k = np.array(self.df1[0:10]["trial_col"], dtype=np.int64)
         np.testing.assert_almost_equal(k, y)
-        # too short coulmn
+        # too short column
         sh_col = np.arange(start=16000, stop=16003, step=1)
-        self.assertRaises(ValueError, lambda:
-                          self.df1.append_column(sh_col, name='sh_col'))
+        with self.assertRaises(ValueError):
+            self.df1.append_column(sh_col, name='sh_col')
         # too long column
         long = np.arange(start=16000, stop=16500, step=1)
-        self.assertRaises(ValueError, lambda:
-                          self.df1.append_column(long, name='long'))
+        with self.assertRaises(ValueError):
+            self.df1.append_column(long, name='long')
 
     def test_append_rows(self):
         # append single row
@@ -182,12 +187,12 @@ def test_df_shape(self):
         # create df with incorrect dimension to see if Error is raised
         arr = np.arange(1000).reshape(10, 10, 10)
         if sys.version_info[0] == 3:
-            self.assertRaises(ValueError,
-                              lambda: self.block.create_data_frame('err', 'err',
-                                                                   {'name': np.int64}, data=arr))
+            with self.assertRaises(ValueError):
+                self.block.create_data_frame('err', 'err',
+                                             {'name': np.int64},
+                                             data=arr)
 
     def test_data_type(self):
         assert self.df1.dtype[4] == np.int32
         assert self.df1.dtype[0] != self.df1.dtype[4]
         assert self.df1.dtype[2] == self.df1.dtype[3]
-
diff --git a/nixio/test/test_nix_compatibility.py b/nixio/test/test_nix_compatibility.py
index a1e3946c..0c8878b9 100644
--- a/nixio/test/test_nix_compatibility.py
+++ b/nixio/test/test_nix_compatibility.py
@@ -506,8 +506,10 @@ def test_full_file(tmpdir):
         group.data_arrays.append(da)
 
     df = block.create_data_frame("adataframe", "4-column df",
-                                 col_dict=OrderedDict([('name', str), ('id',
-                                     int), ('time', float), ('Adjusted', bool)]),
+                                 col_dict=OrderedDict([('name', str),
+                                                       ('id', int),
+                                                       ('time', float),
+                                                       ('Adjusted', bool)]),
                                  data=[["Bob", 9, 11.28, False],
                                        ["Jane", 10, 14.37, True]])
     df.append_rows([["Alice", 2, 3.7, False]])
diff --git a/nixio/validate.py b/nixio/validate.py
index 15373015..0afa4047 100644
--- a/nixio/validate.py
+++ b/nixio/validate.py
@@ -1,7 +1,10 @@
 from __future__ import (absolute_import, division, print_function)
 import numpy as np
 from .util import units
-from collections import OrderedDict
+try:
+    from collections.abc import OrderedDict
+except ImportError:
+    from collections import OrderedDict
 
 
 class Validate:

From 5323305c211ed38ae7672b3fd23594345937adca Mon Sep 17 00:00:00 2001
From: Achilleas Koutsou
Date: Wed, 16 Jan 2019 20:55:47 +0100
Subject: [PATCH 3/4] Reimplementing force-compat test option

Old way is deprecated and produces warnings. Now building in conftest
and sending bindir to tests via fixture.

Tests are disabled by being marked with 'skip' if the build fails and
force-compat is not specified.
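
As a quick illustration (a sketch only; every name here is taken from the
test changes below, nothing new is introduced), a compatibility test now
opts in through the marker and receives the build directory via the
fixture:

    @pytest.mark.compatibility
    def test_blocks(tmpdir, bindir):
        nixfilepath = os.path.join(str(tmpdir), "blocktest.nix")
        # ... create blocks with nixpy and close the file ...
        runcpp(os.path.join(bindir, "readblocks"), nixfilepath)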
---
 conftest.py                          | 38 +++++++++++++++---
 nixio/test/test_nix_compatibility.py | 60 ++++++++++++++--------------
 2 files changed, 63 insertions(+), 35 deletions(-)

diff --git a/conftest.py b/conftest.py
index f3585f98..304892ee 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,6 +1,34 @@
+import pytest
+import tempfile
+from nixio.test.xcompat.compile import maketests
+
+
+BINDIR = tempfile.mkdtemp(prefix="nixpy-tests-")
+xcavail = maketests(BINDIR)
+
+
 def pytest_addoption(parser):
-    parser.addoption("--force-compat",
-                     action="store_true",
-                     default=False,
-                     help=("Force cross-compatibility tests. "
-                           "Raise error instead of skipping."))
+    parser.addoption("--force-compat", action="store_true", default=False,
+                     help=("Force run compatibility tests. "
+                           "If they fail to compile (e.g., missing NIX) "
+                           "the tests won't pass.")
+                     )
+
+
+@pytest.fixture
+def bindir(request):
+    return BINDIR
+
+
+def pytest_collection_modifyitems(config, items):
+    if config.getoption("--force-compat"):
+        print("Forcing compatibility tests")
+        return
+    if not xcavail:
+        print("Skipping compatibility tests")
+        skip_compat = pytest.mark.skip(
+            reason="Compatibility tests require the NIX library"
+        )
+        for item in items:
+            if "compatibility" in item.keywords:
+                item.add_marker(skip_compat)
diff --git a/nixio/test/test_nix_compatibility.py b/nixio/test/test_nix_compatibility.py
index 0c8878b9..2e042df0 100644
--- a/nixio/test/test_nix_compatibility.py
+++ b/nixio/test/test_nix_compatibility.py
@@ -11,27 +11,14 @@
     from collections.abc import Iterable
 except ImportError:
     from collections import Iterable
+from collections import OrderedDict
 from six import string_types
 from subprocess import Popen, PIPE
 import numpy as np
-import tempfile
 import pytest
-from collections import OrderedDict
 
 import nixio as nix
-from .xcompat.compile import maketests
-
-BINDIR = tempfile.mkdtemp(prefix="nixpy-tests-")
-
-# skip these tests if nix isn't available
-if pytest.config.getoption("--force-compat"):
-    print("Forcing compatibility tests")
-    maketests(BINDIR)
-else:
-    pytestmark = pytest.mark.skipif(
-        "skip()",
-        reason="Compatibility tests require the C++ NIX library")
 
 
 dtypes = (
     nix.DataType.UInt8,
@@ -49,10 +36,6 @@
 )
 
 
-def skip():
-    return not maketests(BINDIR)
-
-
 def validate(fname):
     """
     Runs the nix validation function on the given file.
@@ -60,10 +43,7 @@ def validate(fname):
     runcpp("validate", fname)
 
 
-def runcpp(command, *args):
-    cmdbin = os.path.join(BINDIR, command)
-    cmdargs = [cmdbin]
-    cmdargs.extend(args)
+def runcpp(*cmdargs):
     proc = Popen(cmdargs, stdout=PIPE, stderr=PIPE)
     proc.wait()
     stdout = proc.stdout.read().decode()
@@ -74,7 +54,8 @@ def runcpp(command, *args):
         raise ValueError(stdout+stderr)
 
 
-def test_blocks(tmpdir):
+@pytest.mark.compatibility
+def test_blocks(tmpdir, bindir):
     nixfilepath = os.path.join(str(tmpdir), "blocktest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
     for idx in range(10):
@@ -85,10 +66,12 @@ def test_blocks(tmpdir):
     nix_file.close()
 
     # validate(nixfilepath)
-    runcpp("readblocks", nixfilepath)
+    cmd = os.path.join(bindir, "readblocks")
+    runcpp(cmd, nixfilepath)
 
 
-def test_groups(tmpdir):
+@pytest.mark.compatibility
+def test_groups(tmpdir, bindir):
     nixfilepath = os.path.join(str(tmpdir), "grouptest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
     blk = nix_file.create_block("test_block", "blocktype")
@@ -99,9 +82,11 @@ def test_groups(tmpdir):
     nix_file.close()
 
     # validate(nixfilepath)
-    runcpp("readgroups", nixfilepath)
+    cmd = os.path.join(bindir, "readgroups")
+    runcpp(cmd, nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_data_arrays(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "arraytest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -126,6 +111,7 @@ def _test_data_arrays(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_data_frames(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "frametest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -150,6 +136,7 @@ def _test_data_frames(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_tags(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "tagtest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -172,6 +159,7 @@ def _test_tags(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_multi_tags(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "mtagtest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -194,6 +182,7 @@ def _test_multi_tags(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_sources(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "sourcetest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -225,6 +214,7 @@ def _test_sources(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_dimensions(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "dimtest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -265,6 +255,7 @@ def _test_dimensions(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def _test_tag_features(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "feattest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -289,6 +280,7 @@ def _test_tag_features(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def test_multi_tag_features(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "mtagfeattest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -363,6 +355,7 @@ def test_multi_tag_features(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def test_multi_tag_references(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "blocktest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -394,6 +387,7 @@ def test_multi_tag_references(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def test_properties(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "proptest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -421,6 +415,7 @@ def test_properties(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def test_sections(tmpdir):
     nixfilepath = os.path.join(str(tmpdir), "sectiontest.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
@@ -444,6 +439,7 @@ def test_sections(tmpdir):
     # validate(nixfilepath)
 
 
+@pytest.mark.compatibility
 def test_full_write(tmpdir):
     # Create a fully-featured nix file
     nixfilepath = os.path.join(str(tmpdir), "fulltest.nix")
@@ -468,7 +464,8 @@ def test_full_write(tmpdir):
     # validate(nixfilepath)
 
 
-def test_full_file(tmpdir):
+@pytest.mark.compatibility
+def test_full_file(tmpdir, bindir):
     nixfilepath = os.path.join(str(tmpdir), "filetest-writepy.nix")
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.Overwrite)
 
@@ -642,7 +639,8 @@ def test_full_file(tmpdir):
                                 dtype=dt, data=dt(0))
 
     nix_file.close()
-    runcpp("readfullfile", nixfilepath)
+    cmd = os.path.join(bindir, "readfullfile")
+    runcpp(cmd, nixfilepath)
     # validate(nixfilepath)
 
 
@@ -677,9 +675,11 @@ def compare(exp, actual):
         assert exp == actual, "Expected {}, got {}".format(exp, actual)
 
 
-def test_full_file_read(tmpdir):
+@pytest.mark.compatibility
+def test_full_file_read(tmpdir, bindir):
     nixfilepath = os.path.join(str(tmpdir), "filetest-readpy.nix")
-    runcpp("writefullfile", nixfilepath)
+    cmd = os.path.join(bindir, "writefullfile")
+    runcpp(cmd, nixfilepath)
     nix_file = nix.File.open(nixfilepath, mode=nix.FileMode.ReadOnly)
 
     # Check object counts

From 1551af098a0e4d98ff40aa4b582d26e9dd4d6307 Mon Sep 17 00:00:00 2001
From: Achilleas Koutsou
Date: Thu, 17 Jan 2019 03:22:04 +0100
Subject: [PATCH 4/4] Run compatibility tests only when requested

The compatibility tests are no longer run by default when NIX is
detected. They can be run by specifying --nix-compat. If specified, the
tests are compiled and any errors will cause failure of the
compatibility tests. If the option is not specified, the tests are
skipped, even if NIX is available.

One reason for this change is that the compatibility tests were always
compiled when NIX was available, even when they were unselected, and
compilation slows down the tests. With this change, they are only
compiled and run when explicitly enabled.
---
 .travis.yml |  4 ++--
 conftest.py | 28 ++++++++++++----------------
 2 files changed, 14 insertions(+), 18 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index c2d24b35..e444cb57 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -77,9 +77,9 @@ install:
 
 script:
   - if [[ "${coveralls}" == 1 ]]; then
-      coverage run --source=nixio setup.py test --addopts "--force-compat -s" && coverage report -m;
+      coverage run --source=nixio setup.py test --addopts "--nix-compat -s" && coverage report -m;
     else
-      python${pymajor} setup.py test --addopts "--force-compat -s -nauto";
+      python${pymajor} setup.py test --addopts "--nix-compat -s -nauto";
     fi
 
 after_success:
diff --git a/conftest.py b/conftest.py
index 304892ee..377ddc70 100644
--- a/conftest.py
+++ b/conftest.py
@@ -4,15 +4,12 @@
 
 
 BINDIR = tempfile.mkdtemp(prefix="nixpy-tests-")
-xcavail = maketests(BINDIR)
 
 
 def pytest_addoption(parser):
-    parser.addoption("--force-compat", action="store_true", default=False,
-                     help=("Force run compatibility tests. "
-                           "If they fail to compile (e.g., missing NIX) "
-                           "the tests won't pass.")
-                     )
+    parser.addoption("--nix-compat", action="store_true", default=False,
+                     help=("Run nix compatibility tests "
+                           "(requires NIX library)"))
 
 
 @pytest.fixture
@@ -21,14 +18,13 @@ def bindir(request):
 
 
 def pytest_collection_modifyitems(config, items):
-    if config.getoption("--force-compat"):
-        print("Forcing compatibility tests")
+    if config.getoption("--nix-compat"):
+        print("Compiling NIX compatibility tests")
+        maketests(BINDIR)
         return
-    if not xcavail:
-        print("Skipping compatibility tests")
-        skip_compat = pytest.mark.skip(
-            reason="Compatibility tests require the NIX library"
-        )
-        for item in items:
-            if "compatibility" in item.keywords:
-                item.add_marker(skip_compat)
+    skip_compat = pytest.mark.skip(
+        reason="Use --nix-compat option to run compatibility tests"
+    )
+    for item in items:
+        if "compatibility" in item.keywords:
+            item.add_marker(skip_compat)