Update docval to accept fully qualified class names (#1036)
rly committed Jan 24, 2024
1 parent fa18cc5 commit d5d6383
Showing 11 changed files with 99 additions and 23 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,11 @@
# HDMF Changelog

## HDMF 3.12.1 (Upcoming)

### Bug fixes
- Fixed internal links in docstrings and tutorials. @stephprince [#1031](https://github.com/hdmf-dev/hdmf/pull/1031)
- Fixed issue with creating documentation links to classes in docval arguments. @rly [#1036](https://github.com/hdmf-dev/hdmf/pull/1036)

## HDMF 3.12.0 (January 16, 2024)

### Enhancements
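The change documented above means a `docval` type specification given as a string (commonly used in place of the class object, e.g. to avoid circular imports) may now be a fully qualified dotted path; the value still passes type checking, and the generated docstring gains a working cross-reference. A minimal usage sketch, assuming this commit is installed; the `describe` function below is hypothetical and not part of the commit:

```python
from hdmf.container import Container
from hdmf.utils import docval, getargs


@docval({'name': 'container', 'type': 'hdmf.container.Container',  # fully qualified string type
         'doc': 'any HDMF Container'},
        is_method=False)
def describe(**kwargs):
    """Return the name of the given container."""
    container = getargs('container', kwargs)
    return container.name


print(describe(container=Container(name='data')))  # prints 'data'
```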
6 changes: 4 additions & 2 deletions src/hdmf/backends/hdf5/h5_utils.py
@@ -86,7 +86,8 @@ def append(self, dataset, data):

class H5Dataset(HDMFDataset):
@docval({'name': 'dataset', 'type': (Dataset, Array), 'doc': 'the HDF5 file lazily evaluate'},
{'name': 'io', 'type': 'HDF5IO', 'doc': 'the IO object that was used to read the underlying dataset'})
{'name': 'io', 'type': 'hdmf.backends.hdf5.h5tools.HDF5IO',
'doc': 'the IO object that was used to read the underlying dataset'})
def __init__(self, **kwargs):
self.__io = popargs('io', kwargs)
super().__init__(**kwargs)
@@ -175,7 +176,8 @@ def get_object(self, h5obj):
class AbstractH5TableDataset(DatasetOfReferences):

@docval({'name': 'dataset', 'type': (Dataset, Array), 'doc': 'the HDF5 file lazily evaluate'},
{'name': 'io', 'type': 'HDF5IO', 'doc': 'the IO object that was used to read the underlying dataset'},
{'name': 'io', 'type': 'hdmf.backends.hdf5.h5tools.HDF5IO',
'doc': 'the IO object that was used to read the underlying dataset'},
{'name': 'types', 'type': (list, tuple),
'doc': 'the IO object that was used to read the underlying dataset'})
def __init__(self, **kwargs):
5 changes: 3 additions & 2 deletions src/hdmf/backends/hdf5/h5tools.py
@@ -363,7 +363,7 @@ def copy_file(self, **kwargs):
{'name': 'exhaust_dci', 'type': bool,
'doc': 'If True (default), exhaust DataChunkIterators one at a time. If False, exhaust them concurrently.',
'default': True},
{'name': 'herd', 'type': 'HERD',
{'name': 'herd', 'type': 'hdmf.common.resources.HERD',
'doc': 'A HERD object to populate with references.',
'default': None})
def write(self, **kwargs):
@@ -399,7 +399,8 @@ def __cache_spec(self):
ns_builder.export(self.__ns_spec_path, writer=writer)

_export_args = (
{'name': 'src_io', 'type': 'HDMFIO', 'doc': 'the HDMFIO object for reading the data to export'},
{'name': 'src_io', 'type': 'hdmf.backends.io.HDMFIO',
'doc': 'the HDMFIO object for reading the data to export'},
{'name': 'container', 'type': Container,
'doc': ('the Container object to export. If None, then the entire contents of the HDMFIO object will be '
'exported'),
5 changes: 3 additions & 2 deletions src/hdmf/backends/io.py
@@ -75,7 +75,7 @@ def read(self, **kwargs):
return container

@docval({'name': 'container', 'type': Container, 'doc': 'the Container object to write'},
{'name': 'herd', 'type': 'HERD',
{'name': 'herd', 'type': 'hdmf.common.resources.HERD',
'doc': 'A HERD object to populate with references.',
'default': None}, allow_extra=True)
def write(self, **kwargs):
@@ -98,7 +98,8 @@ def write(self, **kwargs):
f_builder = self.__manager.build(container, source=self.__source, root=True)
self.write_builder(f_builder, **kwargs)

@docval({'name': 'src_io', 'type': 'HDMFIO', 'doc': 'the HDMFIO object for reading the data to export'},
@docval({'name': 'src_io', 'type': 'hdmf.backends.io.HDMFIO',
'doc': 'the HDMFIO object for reading the data to export'},
{'name': 'container', 'type': Container,
'doc': ('the Container object to export. If None, then the entire contents of the HDMFIO object will be '
'exported'),
18 changes: 12 additions & 6 deletions src/hdmf/build/builders.py
@@ -14,7 +14,8 @@
class Builder(dict, metaclass=ABCMeta):

@docval({'name': 'name', 'type': str, 'doc': 'the name of the group'},
{'name': 'parent', 'type': 'Builder', 'doc': 'the parent builder of this Builder', 'default': None},
{'name': 'parent', 'type': 'hdmf.build.builders.Builder', 'doc': 'the parent builder of this Builder',
'default': None},
{'name': 'source', 'type': str,
'doc': 'the source of the data in this builder e.g. file name', 'default': None})
def __init__(self, **kwargs):
@@ -79,7 +80,8 @@ class BaseBuilder(Builder, metaclass=ABCMeta):
@docval({'name': 'name', 'type': str, 'doc': 'The name of the builder.'},
{'name': 'attributes', 'type': dict, 'doc': 'A dictionary of attributes to create in this builder.',
'default': dict()},
{'name': 'parent', 'type': 'GroupBuilder', 'doc': 'The parent builder of this builder.', 'default': None},
{'name': 'parent', 'type': 'hdmf.build.builders.GroupBuilder', 'doc': 'The parent builder of this builder.',
'default': None},
{'name': 'source', 'type': str,
'doc': 'The source of the data represented in this builder', 'default': None})
def __init__(self, **kwargs):
@@ -134,7 +136,8 @@ class GroupBuilder(BaseBuilder):
'doc': ('A dictionary or list of links to add to this group. If a dict is provided, only the '
'values are used.'),
'default': dict()},
{'name': 'parent', 'type': 'GroupBuilder', 'doc': 'The parent builder of this builder.', 'default': None},
{'name': 'parent', 'type': 'hdmf.build.builders.GroupBuilder', 'doc': 'The parent builder of this builder.',
'default': None},
{'name': 'source', 'type': str,
'doc': 'The source of the data represented in this builder.', 'default': None})
def __init__(self, **kwargs):
@@ -213,19 +216,22 @@ def __check_obj_type(self, name, obj_type):
raise ValueError("'%s' already exists in %s.%s, cannot set in %s."
% (name, self.name, self.obj_type[name], obj_type))

@docval({'name': 'builder', 'type': 'GroupBuilder', 'doc': 'The GroupBuilder to add to this group.'})
@docval({'name': 'builder', 'type': 'hdmf.build.builders.GroupBuilder',
'doc': 'The GroupBuilder to add to this group.'})
def set_group(self, **kwargs):
"""Add a subgroup to this group."""
builder = getargs('builder', kwargs)
self.__set_builder(builder, GroupBuilder.__group)

@docval({'name': 'builder', 'type': 'DatasetBuilder', 'doc': 'The DatasetBuilder to add to this group.'})
@docval({'name': 'builder', 'type': 'hdmf.build.builders.DatasetBuilder',
'doc': 'The DatasetBuilder to add to this group.'})
def set_dataset(self, **kwargs):
"""Add a dataset to this group."""
builder = getargs('builder', kwargs)
self.__set_builder(builder, GroupBuilder.__dataset)

@docval({'name': 'builder', 'type': 'LinkBuilder', 'doc': 'The LinkBuilder to add to this group.'})
@docval({'name': 'builder', 'type': 'hdmf.build.builders.LinkBuilder',
'doc': 'The LinkBuilder to add to this group.'})
def set_link(self, **kwargs):
"""Add a link to this group."""
builder = getargs('builder', kwargs)
2 changes: 1 addition & 1 deletion src/hdmf/build/classgenerator.py
@@ -35,7 +35,7 @@ def register_generator(self, **kwargs):
{'name': 'spec', 'type': BaseStorageSpec, 'doc': ''},
{'name': 'parent_cls', 'type': type, 'doc': ''},
{'name': 'attr_names', 'type': dict, 'doc': ''},
{'name': 'type_map', 'type': 'TypeMap', 'doc': ''},
{'name': 'type_map', 'type': 'hdmf.build.manager.TypeMap', 'doc': ''},
returns='the class for the given namespace and data_type', rtype=type)
def generate_class(self, **kwargs):
"""Get the container class from data type specification.
4 changes: 2 additions & 2 deletions src/hdmf/container.py
@@ -34,7 +34,7 @@ class HERDManager:
This class manages whether to set/attach an instance of HERD to the subclass.
"""

@docval({'name': 'herd', 'type': 'HERD',
@docval({'name': 'herd', 'type': 'hdmf.common.resources.HERD',
'doc': 'The external resources to be used for the container.'},)
def link_resources(self, **kwargs):
"""
@@ -389,7 +389,7 @@ def set_modified(self, **kwargs):
def children(self):
return tuple(self.__children)

@docval({'name': 'child', 'type': 'Container',
@docval({'name': 'child', 'type': 'hdmf.container.Container',
'doc': 'the child Container for this Container', 'default': None})
def add_child(self, **kwargs):
warn(DeprecationWarning('add_child is deprecated. Set the parent attribute instead.'))
14 changes: 8 additions & 6 deletions src/hdmf/spec/spec.py
@@ -106,7 +106,7 @@ class Spec(ConstructableDict):
@docval({'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'},
{'name': 'name', 'type': str, 'doc': 'The name of this attribute', 'default': None},
{'name': 'required', 'type': bool, 'doc': 'whether or not this attribute is required', 'default': True},
{'name': 'parent', 'type': 'Spec', 'doc': 'the parent of this spec', 'default': None})
{'name': 'parent', 'type': 'hdmf.spec.spec.Spec', 'doc': 'the parent of this spec', 'default': None})
def __init__(self, **kwargs):
name, doc, required, parent = getargs('name', 'doc', 'required', 'parent', kwargs)
super().__init__()
@@ -210,7 +210,7 @@ def is_region(self):
{'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this dataset', 'default': None},
{'name': 'required', 'type': bool,
'doc': 'whether or not this attribute is required. ignored when "value" is specified', 'default': True},
{'name': 'parent', 'type': 'BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
{'name': 'parent', 'type': 'hdmf.spec.spec.BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
{'name': 'value', 'type': None, 'doc': 'a constant value for this attribute', 'default': None},
{'name': 'default_value', 'type': None, 'doc': 'a default value for this attribute', 'default': None}
]
@@ -372,7 +372,8 @@ def required(self):
''' Whether or not the this spec represents a required field '''
return self.quantity not in (ZERO_OR_ONE, ZERO_OR_MANY)

@docval({'name': 'inc_spec', 'type': 'BaseStorageSpec', 'doc': 'the data type this specification represents'})
@docval({'name': 'inc_spec', 'type': 'hdmf.spec.spec.BaseStorageSpec',
'doc': 'the data type this specification represents'})
def resolve_spec(self, **kwargs):
"""Add attributes from the inc_spec to this spec and track which attributes are new and overridden."""
inc_spec = getargs('inc_spec', kwargs)
@@ -713,7 +714,8 @@ def __is_sub_dtype(cls, orig, new):
return False
return new_prec >= orig_prec

@docval({'name': 'inc_spec', 'type': 'DatasetSpec', 'doc': 'the data type this specification represents'})
@docval({'name': 'inc_spec', 'type': 'hdmf.spec.spec.DatasetSpec',
'doc': 'the data type this specification represents'})
def resolve_spec(self, **kwargs):
inc_spec = getargs('inc_spec', kwargs)
if isinstance(self.dtype, list):
@@ -1298,7 +1300,7 @@ def add_dataset(self, **kwargs):
self.set_dataset(spec)
return spec

@docval({'name': 'spec', 'type': 'DatasetSpec', 'doc': 'the specification for the dataset'})
@docval({'name': 'spec', 'type': 'hdmf.spec.spec.DatasetSpec', 'doc': 'the specification for the dataset'})
def set_dataset(self, **kwargs):
''' Add the given specification for a dataset to this group specification '''
spec = getargs('spec', kwargs)
@@ -1331,7 +1333,7 @@ def add_link(self, **kwargs):
self.set_link(spec)
return spec

@docval({'name': 'spec', 'type': 'LinkSpec', 'doc': 'the specification for the object to link to'})
@docval({'name': 'spec', 'type': 'hdmf.spec.spec.LinkSpec', 'doc': 'the specification for the object to link to'})
def set_link(self, **kwargs):
''' Add a given specification for a link to this group specification '''
spec = getargs('spec', kwargs)
11 changes: 10 additions & 1 deletion src/hdmf/utils.py
@@ -96,7 +96,11 @@ def check_type(value, argtype, allow_none=False):
return __is_float(value)
elif argtype == 'bool':
return __is_bool(value)
return argtype in [cls.__name__ for cls in value.__class__.__mro__]
cls_names = []
for cls in value.__class__.__mro__:
cls_names.append(f"{cls.__module__}.{cls.__qualname__}")
cls_names.append(cls.__name__)
return argtype in cls_names
elif isinstance(argtype, type):
if argtype is int:
return __is_int(value)
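The lines added above replace a check against bare class names only: `check_type` now walks the value's method resolution order and records both the fully qualified `module.qualname` and the plain `__name__` of every class, so a string type in either form matches. A standalone sketch of that matching rule (not the hdmf function itself):

```python
import numpy as np


def matches_string_type(value, argtype: str) -> bool:
    """Return True if argtype names any class in the value's MRO,
    either by bare name or by fully qualified module.qualname."""
    names = []
    for cls in type(value).__mro__:
        names.append(f"{cls.__module__}.{cls.__qualname__}")  # e.g. 'numpy.ndarray'
        names.append(cls.__name__)                            # e.g. 'ndarray'
    return argtype in names


# Both spellings now resolve to the same class:
assert matches_string_type(np.zeros(3), "numpy.ndarray")
assert matches_string_type(np.zeros(3), "ndarray")
```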
@@ -706,6 +710,11 @@ def to_str(argtype):
return ":py:class:`~{module}.{name}`".format(name=name, module=module.split('.')[0])
else:
return ":py:class:`~{module}.{name}`".format(name=name, module=module)
elif isinstance(argtype, str):
if "." in argtype: # type is (probably) a fully-qualified class name
return f":py:class:`~{argtype}`"
else: # type is locally resolved class name. just format as code
return f"``{argtype}``"
return argtype

def __sphinx_arg(arg):
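The branch added to `to_str` above controls how string type names are rendered in docval-generated docstrings: a dotted name becomes a Sphinx `:py:class:` cross-reference, while a bare local name (which Sphinx cannot reliably resolve) is kept as an inline code literal. A small sketch mirroring that rule:

```python
def format_string_type(argtype: str) -> str:
    """Mirror of the string-type branch added to to_str above."""
    if "." in argtype:  # fully qualified name -> Sphinx cross-reference
        return f":py:class:`~{argtype}`"
    return f"``{argtype}``"  # bare local name -> inline code literal


print(format_string_type("hdmf.common.resources.HERD"))  # :py:class:`~hdmf.common.resources.HERD`
print(format_string_type("HERD"))                        # ``HERD``
```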
1 change: 1 addition & 0 deletions tests/unit/test_io_hdf5_streaming.py
@@ -19,6 +19,7 @@ class TestRos3(TestCase):
def setUp(self):
# Skip ROS3 tests if internet is not available or the ROS3 driver is not installed
try:
# this is a 174 KB file
urllib.request.urlopen("https://dandiarchive.s3.amazonaws.com/ros3test.nwb", timeout=1)
except urllib.request.URLError:
self.skipTest("Internet access to DANDI failed. Skipping all Ros3 streaming tests.")
50 changes: 49 additions & 1 deletion tests/unit/utils_test/test_docval.py
@@ -835,7 +835,7 @@ def method(self, **kwargs):
return []

doc = ('method(arg1)\n\n\n\nArgs:\n arg1 (:py:class:`~int`): an arg\n\nReturns:\n '
':py:class:`~list` or :py:class:`~list`, :py:class:`~bool` or :py:class:`~list`, Test: output')
':py:class:`~list` or :py:class:`~list`, :py:class:`~bool` or :py:class:`~list`, ``Test``: output')
self.assertEqual(method.__doc__, doc)


@@ -1128,3 +1128,51 @@ class Dummy2:
pass

self.assertTupleEqual(get_docval_macro('dummy'), (Dummy2, ))


class TestStringType(TestCase):

class Dummy1:
pass

class Dummy2:
pass

def test_check_type(self):
@docval(
{
"name": "arg1",
"type": (int, np.ndarray, "Dummy1", "tests.unit.utils_test.test_docval.TestStringType.Dummy2"),
"doc": "doc"
},
is_method=False,
)
def myfunc(**kwargs):
return kwargs["arg1"]

dummy1 = TestStringType.Dummy1()
assert dummy1 is myfunc(dummy1)

dummy2 = TestStringType.Dummy2()
assert dummy2 is myfunc(dummy2)

def test_docstring(self):
@docval(
{
"name": "arg1",
"type": (int, np.ndarray, "Dummy1", "tests.unit.utils_test.test_docval.TestStringType.Dummy2"),
"doc": "doc"
},
is_method=False,
)
def myfunc(**kwargs):
return kwargs["arg1"]

expected = """myfunc(arg1)
Args:
arg1 (:py:class:`~int` or :py:class:`~numpy.ndarray` or ``Dummy1`` or :py:class:`~tests.unit.utils_test.test_docval.TestStringType.Dummy2`): doc
""" # noqa: E501
assert myfunc.__doc__ == expected
