
BF/RF: automatically import needed interface submodules if Dataset.datasetmethod is not bound yet #3156

Merged
merged 9 commits on Feb 12, 2019
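For orientation: `@datasetmethod` is the mechanism by which importing an interface module attaches the corresponding command to the `Dataset` class as a bound method. A rough, hypothetical sketch of that binding side effect (the real decorator in `datalad.distribution.dataset` takes more parameters; the stub class and the `add` example are illustrative only):

```python
class Dataset(object):  # stub standing in for datalad's Dataset
    pass


def datasetmethod(f, name=None):
    """Attach f to Dataset under `name` (defaults to f's own name)."""
    setattr(Dataset, name or f.__name__, f)
    return f


@datasetmethod
def add(dataset, path):
    # the instance arrives as the `dataset` argument of the bound call
    return 'adding %s to %s' % (path, dataset)


print(Dataset().add('file.txt'))  # works once the binding has run
```

The problem this PR solves: that side effect only happens once the interface module is imported, so a plain `Dataset` instance lacks the methods until something (previously, the scattered "haunted" imports removed below) pulls the modules in.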
2 changes: 0 additions & 2 deletions datalad/distribution/clone.py
@@ -39,8 +39,6 @@
from datalad.utils import rmtree
from datalad.utils import assure_list

from datalad.distribution.add import Add

from .dataset import Dataset
from .dataset import datasetmethod
from .dataset import resolve_path
3 changes: 0 additions & 3 deletions datalad/distribution/create.py
@@ -41,9 +41,6 @@
from datalad.utils import getpwd
from datalad.utils import get_dataset_root

# required to get the binding of `add` as a dataset method
from datalad.distribution.add import Add

from .dataset import Dataset
from .dataset import datasetmethod
from .dataset import EnsureDataset
29 changes: 29 additions & 0 deletions datalad/distribution/dataset.py
@@ -172,6 +172,35 @@ def __eq__(self, other):
            return False
        return realpath(self.path) == realpath(other.path)

    def __getattr__(self, attr):
        # Assure that we are not just missing some late-binding
        # @datasetmethod. We consult the interface definitions; the
        # gotcha could be a mismatch between the explicit name given
        # to @datasetmethod and what is defined in the interfaces.
        if not attr.startswith('_'):  # do not even consider those
            from datalad.interface.base import (
                get_interface_groups, get_api_name
            )
            for _, _, interfaces in get_interface_groups():
                for intfspec in interfaces:
                    # lgr.log(5, "Considering interface %s", intfspec)
                    name = get_api_name(intfspec)
                    if attr == name:
                        from importlib import import_module
                        # importing the module carrying the interface
                        # triggers the @datasetmethod binding
                        import_module(intfspec[0], package='datalad')
                        # now it must be bound
                        meth = getattr(self, attr, None)
                        if meth:
                            lgr.debug(
                                "Found matching interface %s for %s",
                                intfspec, name)
                            return meth
                        # keep going otherwise, although that should
                        # probably not be needed
            lgr.debug("Found no match among known interfaces for %r", attr)
        return super(Dataset, self).__getattribute__(attr)

    def close(self):
        """Perform operations which would close any possible process using this Dataset
        """
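The hunk above is the heart of the change: when normal attribute lookup fails, `__getattr__` scans the known interface specifications and, on a name match, imports the interface's module so that its `@datasetmethod` registration runs, then retries the lookup. A minimal self-contained sketch of this lazy-binding pattern, with a hypothetical `Demo` class and a plain callable standing in for the import side effect:

```python
class Demo(object):
    # maps a method name to a thunk that performs the binding;
    # stands in for datalad's interface-module registry
    _lazy = {}

    def __getattr__(self, attr):
        # __getattr__ only fires when normal lookup fails, so methods
        # that are already bound never pay this cost
        if not attr.startswith('_'):
            binder = self._lazy.pop(attr, None)  # run each binder once
            if binder is not None:
                binder()  # side effect: attaches the method to the class
                return getattr(self, attr)  # now resolves normally
        raise AttributeError(attr)


# stand-in for import_module() triggering @datasetmethod on import
Demo._lazy['add'] = lambda: setattr(Demo, 'add', lambda self: 'added')

d = Demo()
print(d.add())  # binds on first access
print(d.add())  # later calls bypass __getattr__ entirely
```

A nice property of hooking `__getattr__` (rather than `__getattribute__`) is that the scan only runs for attributes that are actually missing; once an interface module is imported, subsequent accesses resolve through normal lookup at full speed.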
6 changes: 0 additions & 6 deletions datalad/distribution/publish.py
@@ -36,10 +36,6 @@
from datalad.support.exceptions import InsufficientArgumentsError
from datalad.support.network import URL, RI, SSHRI, is_ssh

# haunted imports/bindings
from datalad.interface.diff import Diff


from datalad.utils import assure_list
from datalad.dochelpers import exc_str

@@ -510,8 +506,6 @@ def _get_remote_info(ds_path, ds_remote_info, to, missing):
    if not superds:
        return ('error',
                ("No super-dataset to inherit settings for remote %s", to))
    # avoid a global import, only needed for this corner case
    from datalad.distribution.create_sibling import CreateSibling
    # XXX due to the difference between create-sibling and
    # create-sibling-github, inheriting would not be as transparent for -github
    lgr.info("Will try to create a sibling inheriting settings from %s", superds)
3 changes: 0 additions & 3 deletions datalad/distribution/subdatasets.py
@@ -46,9 +46,6 @@
from .dataset import datasetmethod
from .dataset import resolve_path

# bound methods
import datalad.distribution.add

lgr = logging.getLogger('datalad.distribution.subdatasets')


1 change: 0 additions & 1 deletion datalad/distribution/tests/test_dataset.py
@@ -16,7 +16,6 @@
from ..dataset import Dataset, EnsureDataset, resolve_path, require_dataset
from datalad import cfg
from datalad.api import create
from datalad.api import install
from datalad.api import get
from datalad.utils import chpwd, getpwd, rmtree
from datalad.utils import _path_
39 changes: 39 additions & 0 deletions datalad/distribution/tests/test_dataset_api.py
@@ -0,0 +1,39 @@
# ex: set sts=4 ts=4 sw=4 noet:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Test Dataset class @datasetmethod bindings without possible side effects
from needed otherwise datalad.api imports.

This one to be effective should be tested first or in isolation from other
test files
"""

from ..dataset import Dataset
from ...tests.utils import (
    assert_raises,
    with_tempfile,
)


@with_tempfile(mkdir=True)
def test_datasetmethod_bound(path):
    ds = Dataset(path)
    # should be automagically imported/picked up if not bound already
    assert ds.add  # simplest, intfspec with only 2 entries
    assert ds.download_url  # 3 entries, with a dash
    assert ds.create_sibling_github  # 3 entries, 2 dashes
    assert ds.aggregate_metadata  # module name is just "aggregate"
    assert ds.drop  # some fancy parametrization
    assert ds.get  # some fancy parametrization
    # plugins are unfortunately too special ATM -- we would need to
    # import from the full filename path, etc.; that part is postponed
    # assert ds.addurls
    # assert ds.wtf
    # if we ask for some really unknown API -- kaboom
    with assert_raises(AttributeError):
        ds.kaboommethod()
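The user-visible effect the new test exercises: a bare `Dataset` no longer requires a prior `import datalad.api` for its command bindings to exist. Roughly (a hedged sketch; the path is illustrative):

```python
from datalad.distribution.dataset import Dataset

ds = Dataset('/tmp/some/ds')  # illustrative path
# Previously this raised AttributeError unless datalad.api (or the
# specific interface module, e.g. datalad.distribution.add) had
# already been imported; now the first access imports it on demand.
assert ds.add  # resolved via Dataset.__getattr__
```

This is also why the test's comments call out dashes and module names: `get_api_name` must map command names such as `download-url` to the Python-level `download_url`, and a mismatch there would make the lookup miss.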
1 change: 0 additions & 1 deletion datalad/distribution/tests/test_install.py
@@ -28,7 +28,6 @@

from datalad.api import create
from datalad.api import install
from datalad.api import remove
from datalad.api import get
from datalad import consts
from datalad.utils import chpwd
2 changes: 0 additions & 2 deletions datalad/distribution/uninstall.py
@@ -55,8 +55,6 @@ def _uninstall_dataset(ds, check, has_super, **kwargs):
        # error reporting already happened, we can just stop here
        return

    # we want to use the bound dataset method
    from datalad.distribution.subdatasets import Subdatasets
    # TODO: uninstall of a subdataset that has a local URL
    # (e.g. ./anything) implies it cannot be undone; decide how,
    # and whether, to check for that
2 changes: 0 additions & 2 deletions datalad/distribution/update.py
@@ -201,8 +201,6 @@ def __call__(


def _update_repo(ds, remote, reobtain_data):
    # just an ugly workaround for the late binding of ds.get below
    from datalad.distribution.get import Get
    repo = ds.repo

    lgr.info("Applying updates to %s", ds)
4 changes: 0 additions & 4 deletions datalad/interface/annotate_paths.py
@@ -133,7 +133,6 @@ def yield_recursive(ds, path, action, recursion_limit):
    # make sure we get everything relevant in all _checked out_
    # subdatasets; obtaining previously unavailable subdatasets
    # happens elsewhere
    from datalad.distribution.subdatasets import Subdatasets
    for subd_res in ds.subdatasets(
            recursive=True,
            recursion_limit=recursion_limit,
@@ -165,8 +164,6 @@ def get_modified_subpaths(aps, refds, revision, recursion_limit=None,
    revision : str
      Commit-ish
    """
    from datalad.interface.diff import Diff

    # TODO needs recursion limit
    # NOTE this is implemented as a generator despite the fact that we need
    # to sort through _all_ the inputs initially, diff'ing each involved
@@ -668,7 +665,6 @@ def __call__(
                (path_type == 'dataset' and 'registered_subds' not in path_props) or
                path_type == 'directory' or
                not lexists(path)):
            from datalad.distribution.subdatasets import Subdatasets
            # if the path doesn't exist, or is labeled a directory, or is even
            # a dataset (without this info) -> record whether this is a known
            # subdataset to its parent
1 change: 0 additions & 1 deletion datalad/interface/download_url.py
@@ -21,7 +21,6 @@
from ..interface.utils import eval_results
from ..utils import assure_list_from_str
from ..utils import get_dataset_pwds
from ..distribution.add import Add
from ..distribution.dataset import datasetmethod
from ..distribution.dataset import EnsureDataset
from ..distribution.dataset import require_dataset
2 changes: 0 additions & 2 deletions datalad/interface/ls.py
@@ -34,8 +34,6 @@
from ..support.constraints import EnsureStr, EnsureNone
from ..distribution.dataset import Dataset

from datalad.distribution.subdatasets import Subdatasets

from datalad.support.annexrepo import AnnexRepo
from datalad.support.annexrepo import GitRepo
from datalad.utils import is_interactive
2 changes: 0 additions & 2 deletions datalad/interface/rerun.py
@@ -22,8 +22,6 @@
from datalad.interface.base import Interface
from datalad.interface.utils import eval_results
from datalad.interface.base import build_doc
from datalad.interface.diff import Diff
from datalad.interface.unlock import Unlock
from datalad.interface.results import get_status_dict
from datalad.interface.run import run_command
from datalad.interface.run import format_command
5 changes: 0 additions & 5 deletions datalad/interface/run.py
@@ -39,14 +39,9 @@
from datalad.support.param import Parameter
from datalad.support.json_py import dump2stream

from datalad.distribution.add import Add
from datalad.distribution.get import Get
from datalad.distribution.install import Install
from datalad.distribution.remove import Remove
from datalad.distribution.dataset import require_dataset
from datalad.distribution.dataset import EnsureDataset
from datalad.distribution.dataset import datasetmethod
from datalad.interface.unlock import Unlock

from datalad.utils import assure_bytes
from datalad.utils import assure_unicode
2 changes: 0 additions & 2 deletions datalad/interface/run_procedure.py
@@ -37,9 +37,7 @@
from datalad.utils import assure_list
import datalad.support.ansi_colors as ac

# bound dataset methods
from datalad.interface.run import Run
from datalad.distribution.subdatasets import Subdatasets

lgr = logging.getLogger('datalad.interface.run_procedures')

2 changes: 0 additions & 2 deletions datalad/interface/tests/test_ls_webui.py
@@ -26,8 +26,6 @@
from datalad.tests.utils import known_failure_direct_mode
from datalad.tests.utils import with_tree
from datalad.utils import swallow_logs, swallow_outputs, _path_
# needed below as bound dataset method
from datalad.api import add

from datalad.cmd import Runner

1 change: 0 additions & 1 deletion datalad/interface/tests/test_run_procedure.py
@@ -31,7 +31,6 @@
from datalad.distribution.dataset import Dataset
from datalad.support.exceptions import InsufficientArgumentsError
from datalad.api import run_procedure
from datalad.api import clean
from datalad import cfg


1 change: 0 additions & 1 deletion datalad/interface/tests/test_utils.py
@@ -40,7 +40,6 @@
from ..utils import discover_dataset_trace_to_targets
from datalad.interface.base import build_doc
from ..utils import handle_dirty_dataset
from datalad.api import create


__docformat__ = 'restructuredtext'
6 changes: 0 additions & 6 deletions datalad/metadata/aggregate.py
@@ -25,12 +25,6 @@
from hashlib import md5
import shutil

# API commands we need
from datalad.distribution.get import Get
from datalad.distribution.remove import Remove
from datalad.distribution.subdatasets import Subdatasets
from datalad.interface.unlock import Unlock

import datalad
from datalad.dochelpers import exc_str
from datalad.interface.annotate_paths import AnnotatePaths
1 change: 0 additions & 1 deletion datalad/metadata/tests/test_aggregation.py
@@ -14,7 +14,6 @@
from os.path import join as opj

from datalad.api import metadata
from datalad.api import install
from datalad.distribution.dataset import Dataset


2 changes: 0 additions & 2 deletions datalad/metadata/tests/test_base.py
@@ -10,7 +10,6 @@
"""Test metadata """

import logging
import os

from os.path import join as opj
from os.path import relpath
@@ -19,7 +18,6 @@
from datalad.api import Dataset
from datalad.api import aggregate_metadata
from datalad.api import install
from datalad.api import search
from datalad.api import metadata
from datalad.metadata.metadata import (
get_metadata_type,
3 changes: 1 addition & 2 deletions datalad/metadata/tests/test_search.py
@@ -15,7 +15,7 @@
from os import makedirs
from os.path import join as opj
from os.path import dirname
from datalad.api import Dataset, install
from datalad.api import Dataset
from nose.tools import assert_equal, assert_raises
from datalad.utils import (
chpwd,
@@ -36,7 +36,6 @@
from datalad.support.exceptions import NoDatasetArgumentFound

from datalad.api import search
from datalad.metadata import search as search_mod

from ..search import _listdict2dictlist
from ..search import _meta2autofield_dict
3 changes: 0 additions & 3 deletions datalad/plugin/add_readme.py
@@ -60,10 +60,7 @@ def __call__(dataset, filename='README.md', existing='skip'):
        import logging
        lgr = logging.getLogger('datalad.plugin.add_readme')

        from datalad.distribution.add import Add
        from datalad.distribution.dataset import require_dataset
        from datalad.interface.unlock import Unlock
        from datalad.metadata.metadata import Metadata
        from datalad.utils import assure_list

        dataset = require_dataset(dataset, check_installed=True,
3 changes: 0 additions & 3 deletions datalad/resources/procedures/cfg_metadatatypes.py
@@ -8,9 +8,6 @@

from datalad.distribution.dataset import require_dataset

# bound dataset methods
import datalad.interface.save

ds = require_dataset(
    sys.argv[1],
    check_installed=True,
3 changes: 0 additions & 3 deletions datalad/resources/procedures/cfg_text2git.py
@@ -5,9 +5,6 @@

from datalad.distribution.dataset import require_dataset

# bound dataset methods
import datalad.distribution.add

ds = require_dataset(
    sys.argv[1],
    check_installed=True,
3 changes: 0 additions & 3 deletions datalad/resources/procedures/setup_yoda_dataset.py
@@ -10,9 +10,6 @@
from datalad.distribution.dataset import require_dataset
from datalad.utils import create_tree

# bound dataset methods
import datalad.distribution.add

ds = require_dataset(
    sys.argv[1],
    check_installed=True,