diff --git a/.appveyor.yml b/.appveyor.yml
index 5203d30..8ba8d17 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -75,13 +75,29 @@ environment:
     # MacOS core tests
     - ID: MacP38
       DTS: datalad_osf
-      APPVEYOR_BUILD_WORKER_IMAGE: macOS
+      APPVEYOR_BUILD_WORKER_IMAGE: macos-monterey
       PY: 3.8
       INSTALL_GITANNEX: git-annex
       DATALAD_LOCATIONS_SOCKETS: /Users/appveyor/DLTMP/sockets
       CODECOV_BINARY: https://uploader.codecov.io/latest/macos/codecov
 
+# do not run the CI if only documentation changes were made
+# documentation builds are tested elsewhere and cheaper
+skip_commits:
+  files:
+    - docs/
+    - changelog.d/
+    - .github/
+    - CHANGELOG.md
+    - CITATION.cff
+    - CONTRIBUTORS
+    - LICENSE
+    - Makefile
+    - README.md
+    - readthedocs.yml
+
+
 # it is OK to specify paths that may not exist for a particular test run
 cache:
   # pip cache
@@ -152,8 +168,8 @@ install:
 
 
 build_script:
-  - pip install -r requirements-devel.txt
-  - pip install .
+  - python -m pip install -r requirements-devel.txt
+  - python -m pip install .
 
 
 #after_build:
@@ -172,8 +188,9 @@ test_script:
   #- sh: mkdir __testhome__
   #- cd __testhome__
   # run test selecion (--traverse-namespace needed from Python 3.8 onwards)
-  - cmd: python -m nose --traverse-namespace -s -v -A "not (turtle)" --with-cov --cover-package datalad_osf %DTS%
-  - sh: python -m nose --traverse-namespace -s -v -A "not (turtle)" --with-cov --cover-package datalad_osf ${DTS}
+  - cmd: python -m pytest -s -v -m "not (turtle)" -k "%KEYWORDS%" --cov=datalad_osf --pyargs %DTS%
+  # also add --cov datalad, because some core test runs may not touch -next code
+  - sh: PATH=$PWD/../tools/coverage-bin:$PATH python -m pytest -s -v -m "not (turtle)" -k "$KEYWORDS" --cov=datalad_osf --pyargs ${DTS}
 
 
 after_test:
diff --git a/datalad_osf/__init__.py b/datalad_osf/__init__.py
index 2976aac..d42f711 100644
--- a/datalad_osf/__init__.py
+++ b/datalad_osf/__init__.py
@@ -37,9 +37,6 @@
 )
 
 
-from datalad import setup_package
-from datalad import teardown_package
-
 from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions
diff --git a/datalad_osf/conftest.py b/datalad_osf/conftest.py
new file mode 100644
index 0000000..7a3497d
--- /dev/null
+++ b/datalad_osf/conftest.py
@@ -0,0 +1,22 @@
+from datalad.conftest import setup_package
+
+from datalad_next.tests.fixtures import (
+    # no test can leave global config modifications behind
+    check_gitconfig_global,
+    # no test can leave secrets behind
+    check_plaintext_keyring,
+    # function-scope config manager
+    datalad_cfg,
+    # function-scope, Dataset instance
+    dataset,
+    # function-scope, Dataset instance with underlying repository
+    existing_dataset,
+)
+
+from datalad_osf.tests.fixtures import (
+    # standard test dataset setup used throughout the datalad-osf tests
+    minimal_dataset,
+    osf_credentials,
+    osf_credentials_or_skip,
+    osf_node,
+)
diff --git a/datalad_osf/tests/fixtures.py b/datalad_osf/tests/fixtures.py
new file mode 100644
index 0000000..0ec8cb9
--- /dev/null
+++ b/datalad_osf/tests/fixtures.py
@@ -0,0 +1,48 @@
+import pytest
+
+
+@pytest.fixture(autouse=False, scope="function")
+def minimal_dataset(existing_dataset):
+    ds = existing_dataset
+    (ds.pathobj / 'file1.txt').write_text('content')
+    (ds.pathobj / 'subdir').mkdir()
+    (ds.pathobj / 'subdir' / 'file2.txt').write_text('different content')
+    ds.save()
+
+    yield ds
+
+
+@pytest.fixture(autouse=False, scope="session")
+def osf_credentials():
+    """Yields credential dict from get_credentials() suitable for the OSF client"""
+    from datalad_osf.utils import get_credentials
+    cred = get_credentials(allow_interactive=False)
+    yield cred
+
+
+@pytest.fixture(autouse=False, scope="function")
+def osf_credentials_or_skip(osf_credentials):
+    if not any(osf_credentials.values()):
+        pytest.skip(reason='no OSF credentials')
+
+    yield osf_credentials
+
+
+@pytest.fixture(autouse=False, scope="function")
+def osf_node(osf_credentials_or_skip):
+    from datalad_osf.utils import (
+        create_node,
+        delete_node,
+    )
+    from osfclient import OSF
+    osf = OSF(**osf_credentials_or_skip)
+
+    title = 'Temporary DataLad CI project'
+    category = "data"
+
+    node_id, proj_url = create_node(
+        osf.session, title, category=category,
+    )
+    yield node_id
+
+    delete_node(osf.session, node_id)
diff --git a/datalad_osf/tests/test_create_sibling_osf.py b/datalad_osf/tests/test_create_sibling_osf.py
index 8e9ce14..c5a007d 100644
--- a/datalad_osf/tests/test_create_sibling_osf.py
+++ b/datalad_osf/tests/test_create_sibling_osf.py
@@ -7,49 +7,24 @@
 #
 # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-from datalad.api import (
-    Dataset,
-)
-from datalad.tests.utils import (
-    assert_equal,
-    assert_in,
-    assert_not_in,
-    assert_result_count,
-    skip_if,
-    SkipTest,
-    with_tree
-)
-from datalad.utils import Path
-from datalad_osf.utils import (
-    delete_node,
-    get_credentials,
-)
+from datalad_next.tests.utils import assert_result_count
+from datalad_osf.utils import delete_node
 from osfclient import OSF
 
 
-minimal_repo = {'ds': {'file1.txt': 'content',
-                       'subdir': {'file2.txt': 'different content'}
-                       }
-                }
-
-
-@with_tree(tree=minimal_repo)
-def test_invalid_calls(path):
-
-    # - impossible w/o dataset
-    # - impossible w/o annex
-    # - mandatory arguments
-    raise SkipTest("TODO")
+#def test_invalid_calls(minimal_dataset):
+#
+#    # - impossible w/o dataset
+#    # - impossible w/o annex
+#    # - mandatory arguments
+#    raise SkipTest("TODO")
 
 
-@skip_if(cond=not any(get_credentials().values()), msg='no OSF credentials')
-@with_tree(tree=minimal_repo)
-def test_create_osf_simple(path):
+def test_create_osf_simple(osf_credentials_or_skip, minimal_dataset):
 
-    ds = Dataset(path).create(force=True)
-    ds.save()
+    ds = minimal_dataset
 
-    file1 = Path('ds') / "file1.txt"
+    file1 = ds.pathobj / "file1.txt"
 
     create_results = ds.create_sibling_osf(name="osf")
 
@@ -67,41 +42,37 @@
         # special remote is configured:
         remote_log = ds.repo.call_git(['cat-file', 'blob', 'git-annex:remote.log'])
-        assert_in("node={}".format(create_results[0]['id']), remote_log)
+        assert "node={}".format(create_results[0]['id']) in remote_log
 
         # copy files over
         ds.repo.copy_to('.', "osf-storage")
         whereis = ds.repo.whereis(str(file1))
         here = ds.config.get("annex.uuid")
         # files should be 'here' and on remote end:
-        assert_equal(len(whereis), 2)
-        assert_in(here, whereis)
+        assert len(whereis) == 2
+        assert here in whereis
 
         # drop content here
         ds.drop('.')
         whereis = ds.repo.whereis(str(file1))
         # now on remote end only
-        assert_equal(len(whereis), 1)
-        assert_not_in(here, whereis)
+        assert len(whereis) == 1
+        assert here not in whereis
 
         # and get content again from remote:
         ds.get('.')
         whereis = ds.repo.whereis(str(file1))
-        assert_equal(len(whereis), 2)
-        assert_in(here, whereis)
+        assert len(whereis) == 2
+        assert here in whereis
     finally:
         # clean remote end:
-        cred = get_credentials(allow_interactive=False)
-        osf = OSF(**cred)
+        osf = OSF(**osf_credentials_or_skip)
         delete_node(osf.session, create_results[0]['id'])
 
 
-@skip_if(cond=not any(get_credentials().values()), msg='no OSF credentials')
-@with_tree(tree=minimal_repo)
-def test_create_osf_export(path):
+def test_create_osf_export(osf_credentials_or_skip, minimal_dataset):
 
-    ds = Dataset(path).create(force=True)
-    ds.save()
+    ds = minimal_dataset
 
     create_results = ds.create_sibling_osf(
         title="CI dl-create",
@@ -121,12 +92,9 @@
     finally:
         # clean remote end:
-        cred = get_credentials(allow_interactive=False)
-        osf = OSF(**cred)
+        osf = OSF(**osf_credentials_or_skip)
         delete_node(osf.session, create_results[0]['id'])
 
 
-@skip_if(cond=not any(get_credentials().values()), msg='no OSF credentials')
-def test_create_osf_existing():
-
-    raise SkipTest("TODO")
+# def test_create_osf_existing(osf_credentials_or_skip):
+#     raise SkipTest("TODO")
diff --git a/datalad_osf/tests/test_public.py b/datalad_osf/tests/test_public.py
index 7f4c2e8..1d97d79 100644
--- a/datalad_osf/tests/test_public.py
+++ b/datalad_osf/tests/test_public.py
@@ -7,31 +7,32 @@
 #
 # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-from mock import patch
-
 from datalad.api import clone
-from datalad.support.exceptions import IncompleteResultsError
-from datalad.tests.utils import (
+from datalad_next.exceptions import IncompleteResultsError
+from datalad_next.tests.utils import (
     assert_in,
     assert_raises,
     eq_,
     skip_if_on_windows,
-    with_tempfile,
 )
+import datalad_osf.utils as dlosf_utils
 
 
 def no_credentials(*args, **kwargs):
     return dict(token=None, username=None, password=None)
 
 
-@with_tempfile
-# make sure that even with locally configured credentials
-# none actually reach the special remote
-@patch('datalad_osf.utils.get_credentials', no_credentials)
-def test_readonly_access(path):
+def test_readonly_access(tmp_path, monkeypatch):
     # obtain a prepared minimal dataset with pre-configured
     # OSF remotes and prestaged data
-    ds = clone('https://github.com/datalad/testrepo--minimalds-osf.git', path)
+    # make sure that even with locally configured credentials
+    # none actually reach the special remote
+    with monkeypatch.context() as m:
+        m.setattr(dlosf_utils, 'get_credentials', no_credentials)
+        ds = clone(
+            'https://github.com/datalad/testrepo--minimalds-osf.git',
+            tmp_path,
+        )
     # check that both OSF remotes were enabled
     assert_in('osfannex', ds.repo.get_remotes())
     assert_in('osftree', ds.repo.get_remotes())
@@ -54,11 +55,13 @@
 # line-ending(?) issue
 # https://github.com/datalad/datalad-osf/pull/106#issuecomment-653772696
 @skip_if_on_windows
-@with_tempfile
-@patch('datalad_osf.utils.get_credentials', no_credentials)
-def test_readonly_dataset_access(path):
+def test_readonly_dataset_access(tmp_path, monkeypatch):
     # clone from OSF; ds is self-contained at OSF
-    ds = clone('osf://q8xnk', path)
+    # make sure that even with locally configured credentials
+    # none actually reach the special remote
+    with monkeypatch.context() as m:
+        m.setattr(dlosf_utils, 'get_credentials', no_credentials)
+        ds = clone('osf://q8xnk', tmp_path)
     # standard name storage remote
     assert_in('osf-storage', ds.repo.get_remotes())
     for avail in ds.repo.whereis('inannex'):
@@ -69,8 +72,12 @@
     eq_(ds.repo.annexstatus([test_file])[test_file]['has_content'], True)
 
 
-@with_tempfile
-@patch('datalad_osf.utils.get_credentials', no_credentials)
-def test_invalid_url(path):
-
-    assert_raises(IncompleteResultsError, clone, 'osf://q8xnk/somepath', path)
+def test_invalid_url(tmp_path, monkeypatch):
+    # make sure that even with locally configured credentials
+    # none actually reach the special remote
+    with monkeypatch.context() as m:
+        m.setattr(dlosf_utils, 'get_credentials', no_credentials)
+        assert_raises(
+            IncompleteResultsError,
+            clone, 'osf://q8xnk/somepath', tmp_path,
+        )
diff --git a/datalad_osf/tests/test_register.py b/datalad_osf/tests/test_register.py
index 9e832a0..ee2762d 100644
--- a/datalad_osf/tests/test_register.py
+++ b/datalad_osf/tests/test_register.py
@@ -11,4 +11,3 @@
 def test_register():
     import datalad.api as da
     assert hasattr(da, 'create_sibling_osf')
-
diff --git a/datalad_osf/tests/test_remote.py b/datalad_osf/tests/test_remote.py
index 36ccc3b..9a5dee5 100644
--- a/datalad_osf/tests/test_remote.py
+++ b/datalad_osf/tests/test_remote.py
@@ -7,21 +7,7 @@
 #
 # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-from datalad.api import (
-    Dataset,
-)
-from datalad.utils import Path
-from datalad.tests.utils import (
-    with_tempfile,
-    skip_if_on_windows,
-    skip_if,
-)
-from datalad_osf.tests.utils import (
-    with_node,
-)
-from datalad_osf.utils import (
-    get_credentials,
-)
+from datalad_next.tests.utils import skip_if_on_windows
 
 common_init_opts = ["encryption=none", "type=external", "externaltype=osf",
                     "autoenable=true"]
@@ -31,17 +17,12 @@
 # remote. It might just be that the SHA256 key paths get too long
 # https://github.com/datalad/datalad-osf/issues/71
 @skip_if_on_windows
-@skip_if(cond=not any(get_credentials().values()), msg='no OSF credentials')
-@with_node(title="CI osf-special-remote")
-@with_tempfile
-def test_gitannex(osf_id, dspath):
+def test_gitannex(osf_node, minimal_dataset):
     from datalad.cmd import GitWitlessRunner
-    dspath = Path(dspath)
-
-    ds = Dataset(dspath).create()
+    ds = minimal_dataset
 
     # add remote parameters here
-    init_remote_opts = ["node={}".format(osf_id)]
+    init_remote_opts = ["node={}".format(osf_node)]
 
     # add special remote
     init_opts = common_init_opts + init_remote_opts
@@ -52,7 +33,7 @@
     # want to see it in test build's output log.
     # TODO use AnnexRepo._call_annex(..., protocol=None) with 0.14+
     GitWitlessRunner(
-        cwd=dspath,
+        cwd=ds.path,
         env=GitWitlessRunner.get_git_environ_adjusted()).run(
         ['git', 'annex', 'testremote', 'osfproject', "--fast"]
     )
diff --git a/requirements-devel.txt b/requirements-devel.txt
index 1ea0dda..673505a 100644
--- a/requirements-devel.txt
+++ b/requirements-devel.txt
@@ -1,8 +1,6 @@
 # requirements for a development environment
-nose
-nose-exclude
+-e .[devel]
 mock
-coverage
 sphinx
 sphinx_rtd_theme
 docutils<0.18
diff --git a/setup.cfg b/setup.cfg
index 41e7dc1..e86575c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -12,20 +12,19 @@ classifiers =
     Programming Language :: Python :: 3
 
 [options]
-python_requires = >= 3.5
+python_requires = >= 3.7
 install_requires =
     datalad >= 0.18.4
+    datalad_next >= 1.0.0b2
     annexremote >= 1.4.0
     osfclient >= 0.0.5
-test_requires =
-    nose
-    coverage
 packages = find:
 include_package_data = True
 
 [options.extras_require]
 devel =
-    nose
+    pytest
+    pytest-cov
     coverage
 
 [options.packages.find]
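
Usage note (not part of the patch): with the conftest.py/fixtures.py wiring above, the nose
decorators are gone and pytest injects test prerequisites by fixture name. A minimal sketch
of a new-style test, assuming the fixture names introduced in this diff plus pytest's
built-in tmp_path fixture; the test name and scratch file are hypothetical:

    def test_fixture_usage(osf_credentials_or_skip, minimal_dataset, tmp_path):
        # osf_credentials_or_skip skips the test when no OSF credentials are configured
        ds = minimal_dataset
        # minimal_dataset yields a saved dataset containing file1.txt and subdir/file2.txt
        assert (ds.pathobj / 'file1.txt').exists()
        # tmp_path replaces the former @with_tempfile decorator
        (tmp_path / 'scratch.txt').write_text('scratch')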