From e670d536ea08860e7f83e6258cee298047b2757d Mon Sep 17 00:00:00 2001
From: rly
Date: Sat, 20 Jan 2024 15:05:10 -0800
Subject: [PATCH 1/4] Update docval to accept fully qualified class names

---
 src/hdmf/utils.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/src/hdmf/utils.py b/src/hdmf/utils.py
index 12acebbc8..57a4bb465 100644
--- a/src/hdmf/utils.py
+++ b/src/hdmf/utils.py
@@ -96,7 +96,11 @@ def check_type(value, argtype, allow_none=False):
             return __is_float(value)
         elif argtype == 'bool':
             return __is_bool(value)
-        return argtype in [cls.__name__ for cls in value.__class__.__mro__]
+        cls_names = []
+        for cls in value.__class__.__mro__:
+            cls_names.append(f"{cls.__module__}.{cls.__qualname__}")
+            cls_names.append(cls.__name__)
+        return argtype in cls_names
     elif isinstance(argtype, type):
         if argtype is int:
             return __is_int(value)
@@ -706,6 +710,11 @@ def to_str(argtype):
                 return ":py:class:`~{module}.{name}`".format(name=name, module=module.split('.')[0])
             else:
                 return ":py:class:`~{module}.{name}`".format(name=name, module=module)
+        elif isinstance(argtype, str):
+            if "." in argtype:  # type is (probably) a fully-qualified class name
+                return f":py:class:`~{argtype}`"
+            else:  # type is locally resolved class name. just format as code
+                return f"``{argtype}``"
         return argtype
 
     def __sphinx_arg(arg):
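The two hunks above are the core of this series: `check_type` now matches a string type against both the bare `__name__` and the fully qualified `module.qualname` of every class in the value's MRO, and the docstring generator turns dotted strings into Sphinx cross-references. A minimal sketch of the intended usage, modeled on the tests added later in this series (the `describe`/`obj` names are illustrative, not part of the patch):

    from hdmf.container import Container
    from hdmf.utils import docval, getargs

    @docval({'name': 'obj', 'type': 'hdmf.container.Container',  # fully qualified class name as a string
             'doc': 'any Container (or subclass) instance'},
            is_method=False)
    def describe(**kwargs):
        # docval has already checked that the MRO of obj contains hdmf.container.Container
        obj = getargs('obj', kwargs)
        return type(obj).__name__

    print(describe(Container('example')))  # prints "Container"

String types exist so that docval can refer to classes that would otherwise require circular imports; this change lets those strings be fully qualified so that both the runtime check and the generated documentation can resolve them.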
From eea2adb6733a6982b0f0d88e79a8e6961801c6ce Mon Sep 17 00:00:00 2001
From: rly
Date: Sat, 20 Jan 2024 15:05:54 -0800
Subject: [PATCH 2/4] Use fully qualified class names in docval

---
 src/hdmf/backends/hdf5/h5_utils.py   |  6 ++--
 src/hdmf/backends/hdf5/h5tools.py    |  3 +-
 src/hdmf/backends/io.py              |  3 +-
 src/hdmf/build/builders.py           | 18 ++++++----
 src/hdmf/build/classgenerator.py     |  2 +-
 src/hdmf/container.py                |  2 +-
 src/hdmf/spec/spec.py                | 14 ++++----
 tests/unit/test_io_hdf5_streaming.py |  1 +
 tests/unit/utils_test/test_docval.py | 50 +++++++++++++++++++++++++++-
 9 files changed, 80 insertions(+), 19 deletions(-)

diff --git a/src/hdmf/backends/hdf5/h5_utils.py b/src/hdmf/backends/hdf5/h5_utils.py
index be3368f2b..8654e2b4b 100644
--- a/src/hdmf/backends/hdf5/h5_utils.py
+++ b/src/hdmf/backends/hdf5/h5_utils.py
@@ -86,7 +86,8 @@ def append(self, dataset, data):
 
 class H5Dataset(HDMFDataset):
     @docval({'name': 'dataset', 'type': (Dataset, Array), 'doc': 'the HDF5 file lazily evaluate'},
-            {'name': 'io', 'type': 'HDF5IO', 'doc': 'the IO object that was used to read the underlying dataset'})
+            {'name': 'io', 'type': 'hdmf.backends.hdf5.h5tools.HDF5IO',
+             'doc': 'the IO object that was used to read the underlying dataset'})
     def __init__(self, **kwargs):
         self.__io = popargs('io', kwargs)
         super().__init__(**kwargs)
@@ -175,7 +176,8 @@ def get_object(self, h5obj):
 
 class AbstractH5TableDataset(DatasetOfReferences):
     @docval({'name': 'dataset', 'type': (Dataset, Array), 'doc': 'the HDF5 file lazily evaluate'},
-            {'name': 'io', 'type': 'HDF5IO', 'doc': 'the IO object that was used to read the underlying dataset'},
+            {'name': 'io', 'type': 'hdmf.backends.hdf5.h5tools.HDF5IO',
+             'doc': 'the IO object that was used to read the underlying dataset'},
             {'name': 'types', 'type': (list, tuple),
              'doc': 'the IO object that was used to read the underlying dataset'})
     def __init__(self, **kwargs):
diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py
index 3cf2e715b..4c8eea983 100644
--- a/src/hdmf/backends/hdf5/h5tools.py
+++ b/src/hdmf/backends/hdf5/h5tools.py
@@ -399,7 +399,8 @@ def __cache_spec(self):
             ns_builder.export(self.__ns_spec_path, writer=writer)
 
     _export_args = (
-        {'name': 'src_io', 'type': 'HDMFIO', 'doc': 'the HDMFIO object for reading the data to export'},
+        {'name': 'src_io', 'type': 'hdmf.backends.io.HDMFIO',
+         'doc': 'the HDMFIO object for reading the data to export'},
         {'name': 'container', 'type': Container,
          'doc': ('the Container object to export. If None, then the entire contents of the HDMFIO object will be '
                  'exported'),
diff --git a/src/hdmf/backends/io.py b/src/hdmf/backends/io.py
index 4cd68e078..6ec6e6e65 100644
--- a/src/hdmf/backends/io.py
+++ b/src/hdmf/backends/io.py
@@ -98,7 +98,8 @@ def write(self, **kwargs):
         f_builder = self.__manager.build(container, source=self.__source, root=True)
         self.write_builder(f_builder, **kwargs)
 
-    @docval({'name': 'src_io', 'type': 'HDMFIO', 'doc': 'the HDMFIO object for reading the data to export'},
+    @docval({'name': 'src_io', 'type': 'hdmf.backends.io.HDMFIO',
+             'doc': 'the HDMFIO object for reading the data to export'},
             {'name': 'container', 'type': Container,
              'doc': ('the Container object to export. If None, then the entire contents of the HDMFIO object will be '
                      'exported'),
diff --git a/src/hdmf/build/builders.py b/src/hdmf/build/builders.py
index 05a71f80c..73c683bbd 100644
--- a/src/hdmf/build/builders.py
+++ b/src/hdmf/build/builders.py
@@ -14,7 +14,8 @@
 class Builder(dict, metaclass=ABCMeta):
 
     @docval({'name': 'name', 'type': str, 'doc': 'the name of the group'},
-            {'name': 'parent', 'type': 'Builder', 'doc': 'the parent builder of this Builder', 'default': None},
+            {'name': 'parent', 'type': 'hdmf.build.builders.Builder', 'doc': 'the parent builder of this Builder',
+             'default': None},
             {'name': 'source', 'type': str,
              'doc': 'the source of the data in this builder e.g. file name', 'default': None})
     def __init__(self, **kwargs):
@@ -79,7 +80,8 @@ class BaseBuilder(Builder, metaclass=ABCMeta):
     @docval({'name': 'name', 'type': str, 'doc': 'The name of the builder.'},
             {'name': 'attributes', 'type': dict, 'doc': 'A dictionary of attributes to create in this builder.',
              'default': dict()},
-            {'name': 'parent', 'type': 'GroupBuilder', 'doc': 'The parent builder of this builder.', 'default': None},
+            {'name': 'parent', 'type': 'hdmf.build.builders.GroupBuilder', 'doc': 'The parent builder of this builder.',
+             'default': None},
             {'name': 'source', 'type': str,
              'doc': 'The source of the data represented in this builder', 'default': None})
     def __init__(self, **kwargs):
@@ -134,7 +136,8 @@ class GroupBuilder(BaseBuilder):
              'doc': ('A dictionary or list of links to add to this group. If a dict is provided, only the '
                      'values are used.'),
              'default': dict()},
-            {'name': 'parent', 'type': 'GroupBuilder', 'doc': 'The parent builder of this builder.', 'default': None},
+            {'name': 'parent', 'type': 'hdmf.build.builders.GroupBuilder', 'doc': 'The parent builder of this builder.',
+             'default': None},
             {'name': 'source', 'type': str,
              'doc': 'The source of the data represented in this builder.', 'default': None})
     def __init__(self, **kwargs):
@@ -213,19 +216,22 @@ def __check_obj_type(self, name, obj_type):
             raise ValueError("'%s' already exists in %s.%s, cannot set in %s."
                              % (name, self.name, self.obj_type[name], obj_type))
 
-    @docval({'name': 'builder', 'type': 'GroupBuilder', 'doc': 'The GroupBuilder to add to this group.'})
+    @docval({'name': 'builder', 'type': 'hdmf.build.builders.GroupBuilder',
+             'doc': 'The GroupBuilder to add to this group.'})
     def set_group(self, **kwargs):
         """Add a subgroup to this group."""
         builder = getargs('builder', kwargs)
         self.__set_builder(builder, GroupBuilder.__group)
 
-    @docval({'name': 'builder', 'type': 'DatasetBuilder', 'doc': 'The DatasetBuilder to add to this group.'})
+    @docval({'name': 'builder', 'type': 'hdmf.build.builders.DatasetBuilder',
+             'doc': 'The DatasetBuilder to add to this group.'})
     def set_dataset(self, **kwargs):
         """Add a dataset to this group."""
         builder = getargs('builder', kwargs)
         self.__set_builder(builder, GroupBuilder.__dataset)
 
-    @docval({'name': 'builder', 'type': 'LinkBuilder', 'doc': 'The LinkBuilder to add to this group.'})
+    @docval({'name': 'builder', 'type': 'hdmf.build.builders.LinkBuilder',
+             'doc': 'The LinkBuilder to add to this group.'})
     def set_link(self, **kwargs):
         """Add a link to this group."""
         builder = getargs('builder', kwargs)
diff --git a/src/hdmf/build/classgenerator.py b/src/hdmf/build/classgenerator.py
index bdfbbc7da..d2e7d4fc0 100644
--- a/src/hdmf/build/classgenerator.py
+++ b/src/hdmf/build/classgenerator.py
@@ -35,7 +35,7 @@ def register_generator(self, **kwargs):
             {'name': 'spec', 'type': BaseStorageSpec, 'doc': ''},
             {'name': 'parent_cls', 'type': type, 'doc': ''},
             {'name': 'attr_names', 'type': dict, 'doc': ''},
-            {'name': 'type_map', 'type': 'TypeMap', 'doc': ''},
+            {'name': 'type_map', 'type': 'hdmf.build.manager.TypeMap', 'doc': ''},
             returns='the class for the given namespace and data_type', rtype=type)
     def generate_class(self, **kwargs):
         """Get the container class from data type specification.
diff --git a/src/hdmf/container.py b/src/hdmf/container.py
index 229e20083..2d4cf9ba4 100644
--- a/src/hdmf/container.py
+++ b/src/hdmf/container.py
@@ -389,7 +389,7 @@ def set_modified(self, **kwargs):
     def children(self):
         return tuple(self.__children)
 
-    @docval({'name': 'child', 'type': 'Container',
+    @docval({'name': 'child', 'type': 'hdmf.container.Container',
             'doc': 'the child Container for this Container', 'default': None})
     def add_child(self, **kwargs):
         warn(DeprecationWarning('add_child is deprecated. Set the parent attribute instead.'))
diff --git a/src/hdmf/spec/spec.py b/src/hdmf/spec/spec.py
index 9a9d876c3..585fc6494 100644
--- a/src/hdmf/spec/spec.py
+++ b/src/hdmf/spec/spec.py
@@ -106,7 +106,7 @@ class Spec(ConstructableDict):
     @docval({'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'},
             {'name': 'name', 'type': str, 'doc': 'The name of this attribute', 'default': None},
             {'name': 'required', 'type': bool, 'doc': 'whether or not this attribute is required', 'default': True},
-            {'name': 'parent', 'type': 'Spec', 'doc': 'the parent of this spec', 'default': None})
+            {'name': 'parent', 'type': 'hdmf.spec.spec.Spec', 'doc': 'the parent of this spec', 'default': None})
     def __init__(self, **kwargs):
         name, doc, required, parent = getargs('name', 'doc', 'required', 'parent', kwargs)
         super().__init__()
@@ -210,7 +210,7 @@ def is_region(self):
     {'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this dataset', 'default': None},
     {'name': 'required', 'type': bool,
      'doc': 'whether or not this attribute is required. ignored when "value" is specified', 'default': True},
-    {'name': 'parent', 'type': 'BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
+    {'name': 'parent', 'type': 'hdmf.spec.spec.BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
     {'name': 'value', 'type': None, 'doc': 'a constant value for this attribute', 'default': None},
     {'name': 'default_value', 'type': None, 'doc': 'a default value for this attribute', 'default': None}
 ]
@@ -372,7 +372,8 @@ def required(self):
         ''' Whether or not the this spec represents a required field '''
         return self.quantity not in (ZERO_OR_ONE, ZERO_OR_MANY)
 
-    @docval({'name': 'inc_spec', 'type': 'BaseStorageSpec', 'doc': 'the data type this specification represents'})
+    @docval({'name': 'inc_spec', 'type': 'hdmf.spec.spec.BaseStorageSpec',
+             'doc': 'the data type this specification represents'})
     def resolve_spec(self, **kwargs):
         """Add attributes from the inc_spec to this spec and track which attributes are new and overridden."""
         inc_spec = getargs('inc_spec', kwargs)
@@ -713,7 +714,8 @@ def __is_sub_dtype(cls, orig, new):
                 return False
         return new_prec >= orig_prec
 
-    @docval({'name': 'inc_spec', 'type': 'DatasetSpec', 'doc': 'the data type this specification represents'})
+    @docval({'name': 'inc_spec', 'type': 'hdmf.spec.spec.DatasetSpec',
+             'doc': 'the data type this specification represents'})
     def resolve_spec(self, **kwargs):
         inc_spec = getargs('inc_spec', kwargs)
         if isinstance(self.dtype, list):
@@ -1298,7 +1300,7 @@ def add_dataset(self, **kwargs):
         self.set_dataset(spec)
         return spec
 
-    @docval({'name': 'spec', 'type': 'DatasetSpec', 'doc': 'the specification for the dataset'})
+    @docval({'name': 'spec', 'type': 'hdmf.spec.spec.DatasetSpec', 'doc': 'the specification for the dataset'})
     def set_dataset(self, **kwargs):
         ''' Add the given specification for a dataset to this group specification '''
         spec = getargs('spec', kwargs)
@@ -1331,7 +1333,7 @@ def add_link(self, **kwargs):
         self.set_link(spec)
         return spec
 
-    @docval({'name': 'spec', 'type': 'LinkSpec', 'doc': 'the specification for the object to link to'})
+    @docval({'name': 'spec', 'type': 'hdmf.spec.spec.LinkSpec', 'doc': 'the specification for the object to link to'})
     def set_link(self, **kwargs):
         ''' Add a given specification for a link to this group specification '''
         spec = getargs('spec', kwargs)
diff --git a/tests/unit/test_io_hdf5_streaming.py b/tests/unit/test_io_hdf5_streaming.py
index 9729778c7..d1c9d1ab3 100644
--- a/tests/unit/test_io_hdf5_streaming.py
+++ b/tests/unit/test_io_hdf5_streaming.py
@@ -19,6 +19,7 @@ class TestRos3(TestCase):
     def setUp(self):
         # Skip ROS3 tests if internet is not available or the ROS3 driver is not installed
         try:
+            # this is a 174 KB file
             urllib.request.urlopen("https://dandiarchive.s3.amazonaws.com/ros3test.nwb", timeout=1)
         except urllib.request.URLError:
             self.skipTest("Internet access to DANDI failed. Skipping all Ros3 streaming tests.")
diff --git a/tests/unit/utils_test/test_docval.py b/tests/unit/utils_test/test_docval.py
index 50c487182..154a5c4b0 100644
--- a/tests/unit/utils_test/test_docval.py
+++ b/tests/unit/utils_test/test_docval.py
@@ -835,7 +835,7 @@ def method(self, **kwargs):
             return []
 
         doc = ('method(arg1)\n\n\n\nArgs:\n    arg1 (:py:class:`~int`): an arg\n\nReturns:\n    '
-               ':py:class:`~list` or :py:class:`~list`, :py:class:`~bool` or :py:class:`~list`, Test: output')
+               ':py:class:`~list` or :py:class:`~list`, :py:class:`~bool` or :py:class:`~list`, ``Test``: output')
         self.assertEqual(method.__doc__, doc)
 
 
@@ -1128,3 +1128,51 @@ class Dummy2:
             pass
 
         self.assertTupleEqual(get_docval_macro('dummy'), (Dummy2, ))
+
+
+class TestStringType(TestCase):
+
+    class Dummy1:
+        pass
+
+    class Dummy2:
+        pass
+
+    def test_check_type(self):
+        @docval(
+            {
+                "name": "arg1",
+                "type": (int, np.ndarray, "Dummy1", "tests.unit.utils_test.test_docval.TestStringType.Dummy2"),
+                "doc": "doc"
+            },
+            is_method=False,
+        )
+        def myfunc(**kwargs):
+            return kwargs["arg1"]
+
+        dummy1 = TestStringType.Dummy1()
+        assert dummy1 is myfunc(dummy1)
+
+        dummy2 = TestStringType.Dummy2()
+        assert dummy2 is myfunc(dummy2)
+
+    def test_docstring(self):
+        @docval(
+            {
+                "name": "arg1",
+                "type": (int, np.ndarray, "Dummy1", "tests.unit.utils_test.test_docval.TestStringType.Dummy2"),
+                "doc": "doc"
+            },
+            is_method=False,
+        )
+        def myfunc(**kwargs):
+            return kwargs["arg1"]
+
+        expected = """myfunc(arg1)
+
+
+
+Args:
+    arg1 (:py:class:`~int` or :py:class:`~numpy.ndarray` or ``Dummy1`` or :py:class:`~tests.unit.utils_test.test_docval.TestStringType.Dummy2`): doc
+"""  # noqa: E501
+        assert myfunc.__doc__ == expected
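As `test_docstring` above shows, the two string forms render differently in the generated docstrings: a dotted name becomes a `:py:class:` cross-reference, while a bare name is formatted as literal code. A rough sketch of what to expect, with the output shown approximately (the `reopen`/`io`/`mode` names are illustrative, not part of the patch):

    from hdmf.utils import docval

    @docval({'name': 'io', 'type': 'hdmf.backends.io.HDMFIO', 'doc': 'an open backend'},
            {'name': 'mode', 'type': 'str', 'doc': 'the access mode'},
            is_method=False)
    def reopen(**kwargs):
        ...

    print(reopen.__doc__)
    # reopen(io, mode)
    #
    # Args:
    #     io (:py:class:`~hdmf.backends.io.HDMFIO`): an open backend
    #     mode (``str``): the access mode

This is why the docval strings throughout patch 2 were changed from bare names like 'HDF5IO' to fully qualified paths: Sphinx can only build a working link from the dotted form.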
From 91f42411b970e6eed5394b5dc9927d419a7ed331 Mon Sep 17 00:00:00 2001
From: Ryan Ly
Date: Mon, 22 Jan 2024 18:02:19 -0800
Subject: [PATCH 3/4] Update CHANGELOG.md

---
 CHANGELOG.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e256efeb0..ccb6bc516 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # HDMF Changelog
 
+## HDMF 3.12.1 (Upcoming)
+
+### Bug fixes
+- Fixed internal links in docstrings and tutorials. @stephprince [#1031](https://github.com/hdmf-dev/hdmf/pull/1031)
+- Fixed issue with creating documentation links to classes in docval arguments. @rly [#1036](https://github.com/hdmf-dev/hdmf/pull/1036)
+
 ## HDMF 3.12.0 (January 16, 2024)
 
 ### Enhancements

From d8f8d5bfa57ab32bda1f88cfc50b6dd01c5d0742 Mon Sep 17 00:00:00 2001
From: rly
Date: Tue, 23 Jan 2024 17:17:56 -0800
Subject: [PATCH 4/4] Fix missing HERD docval ref

---
 src/hdmf/backends/hdf5/h5tools.py | 2 +-
 src/hdmf/backends/io.py           | 2 +-
 src/hdmf/container.py             | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py
index 4c8eea983..7a644f0b7 100644
--- a/src/hdmf/backends/hdf5/h5tools.py
+++ b/src/hdmf/backends/hdf5/h5tools.py
@@ -363,7 +363,7 @@ def copy_file(self, **kwargs):
             {'name': 'exhaust_dci', 'type': bool,
              'doc': 'If True (default), exhaust DataChunkIterators one at a time. If False, exhaust them concurrently.',
              'default': True},
-            {'name': 'herd', 'type': 'HERD',
+            {'name': 'herd', 'type': 'hdmf.common.resources.HERD',
              'doc': 'A HERD object to populate with references.',
              'default': None})
     def write(self, **kwargs):
diff --git a/src/hdmf/backends/io.py b/src/hdmf/backends/io.py
index 6ec6e6e65..35023066f 100644
--- a/src/hdmf/backends/io.py
+++ b/src/hdmf/backends/io.py
@@ -75,7 +75,7 @@ def read(self, **kwargs):
         return container
 
     @docval({'name': 'container', 'type': Container, 'doc': 'the Container object to write'},
-            {'name': 'herd', 'type': 'HERD',
+            {'name': 'herd', 'type': 'hdmf.common.resources.HERD',
             'doc': 'A HERD object to populate with references.',
             'default': None}, allow_extra=True)
     def write(self, **kwargs):
diff --git a/src/hdmf/container.py b/src/hdmf/container.py
index 2d4cf9ba4..614fbbccd 100644
--- a/src/hdmf/container.py
+++ b/src/hdmf/container.py
@@ -34,7 +34,7 @@ class HERDManager:
     This class manages whether to set/attach an instance of HERD to the subclass.
     """
 
-    @docval({'name': 'herd', 'type': 'HERD',
+    @docval({'name': 'herd', 'type': 'hdmf.common.resources.HERD',
             'doc': 'The external resources to be used for the container.'},)
     def link_resources(self, **kwargs):
        """