
Commit

Cleanup
rly authored Feb 8, 2025
1 parent eb41dda commit f18b9c4
Showing 6 changed files with 22 additions and 25 deletions.
src/hdmf/backends/hdf5/h5tools.py (2 changes: 1 addition & 1 deletion)
@@ -1086,7 +1086,7 @@ def write_link(self, **kwargs):
self.__set_written(builder)
return link_obj

@docval({'name': 'parent', 'type': Group, 'doc': 'the parent HDF5 object'}, # noqa: C901
@docval({'name': 'parent', 'type': Group, 'doc': 'the parent HDF5 object'},
{'name': 'builder', 'type': DatasetBuilder, 'doc': 'the DatasetBuilder to write'},
{'name': 'link_data', 'type': bool,
'doc': 'If not specified otherwise link (True) or copy (False) HDF5 Datasets', 'default': True},
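
The edit above (and the matching ones in the files below) is not cosmetic: flake8's mccabe plugin reports C901 ("... is too complex") against the function definition line, so on modern Python a `# noqa: C901` attached to a decorator line never suppresses anything. The commit therefore drops the decorator-line suppressions and, where the function genuinely exceeds the complexity limit (for example `from_zip` in resources.py below), keeps or places the comment on the `def` line instead. A minimal sketch of the placement rule, with hypothetical names and assuming flake8 runs with a `max-complexity` limit configured:

    # Hypothetical example; `logged` and `branchy` are not hdmf names.
    from functools import wraps

    def logged(func):  # stand-in for a decorator such as hdmf's @docval
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper

    @logged  # a "# noqa: C901" placed on this decorator line has no effect
    def branchy(value):  # noqa: C901
        # C901 is reported on the def line above, so that is where the
        # suppression has to live; imagine enough if/elif branches here to
        # exceed the configured max-complexity limit.
        return value
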
src/hdmf/build/objectmapper.py (4 changes: 2 additions & 2 deletions)
@@ -184,7 +184,7 @@ def no_convert(cls, obj_type):
"""
cls.__no_convert.add(obj_type)

@classmethod # noqa: C901
@classmethod
def convert_dtype(cls, spec, value, spec_dtype=None): # noqa: C901
"""
Convert values to the specified dtype. For example, if a literal int
@@ -276,7 +276,7 @@ def __check_convert_numeric(cls, value_type):
np.issubdtype(value_dtype, np.integer)):
raise ValueError("Cannot convert from %s to 'numeric' specification dtype." % value_type)

@classmethod # noqa: C901
@classmethod
def __check_edgecases(cls, spec, value, spec_dtype): # noqa: C901
"""
Check edge cases in converting data to a dtype
src/hdmf/common/resources.py (4 changes: 2 additions & 2 deletions)
@@ -1000,9 +1000,9 @@ def get_zip_directory(cls, path):
directory = os.path.dirname(os.path.realpath(path))
return directory

@classmethod # noqa: C901
@classmethod
@docval({'name': 'path', 'type': str, 'doc': 'The path to the zip file.'})
def from_zip(cls, **kwargs):
def from_zip(cls, **kwargs): # noqa: C901
"""
Method to read in zipped tsv files to populate HERD.
"""
src/hdmf/common/table.py (4 changes: 2 additions & 2 deletions)
@@ -304,7 +304,7 @@ def __gather_columns(cls, name, bases, classdict):
except AttributeError: # raises error when "__columns__" is not an attr of item
continue

@docval({'name': 'name', 'type': str, 'doc': 'the name of this table'}, # noqa: C901
@docval({'name': 'name', 'type': str, 'doc': 'the name of this table'},
{'name': 'description', 'type': str, 'doc': 'a description of what is in this table'},
{'name': 'id', 'type': ('array_data', 'data', ElementIdentifiers), 'doc': 'the identifiers for this table',
'default': None},
@@ -749,7 +749,7 @@ def __eq__(self, other):
return False
return self.to_dataframe().equals(other.to_dataframe())

@docval({'name': 'name', 'type': str, 'doc': 'the name of this VectorData'}, # noqa: C901
@docval({'name': 'name', 'type': str, 'doc': 'the name of this VectorData'},
{'name': 'description', 'type': str, 'doc': 'a description for this column'},
{'name': 'data', 'type': ('array_data', 'data'),
'doc': 'a dataset where the first dimension is a concatenation of multiple vectors', 'default': list()},
src/hdmf/spec/spec.py (29 changes: 13 additions & 16 deletions)
@@ -1062,7 +1062,7 @@ def is_inherited_spec(self, **kwargs):
return False

@docval({'name': 'spec', 'type': Spec, 'doc': 'the specification to check'})
def is_overridden_spec(self, **kwargs): # noqa: C901
def is_overridden_spec(self, **kwargs):
''' Returns 'True' if specification overrides a specification from the parent type '''
spec = getargs('spec', kwargs)
spec_name = spec.name
@@ -1086,23 +1086,20 @@ def is_overridden_spec(self, **kwargs): # noqa: C901
return self.is_overridden_dataset(spec_name)
elif spec_name in self.__data_types:
return self.is_overridden_type(spec_name)
elif super().is_overridden_spec(spec): # attribute spec
return True
else:
if super().is_overridden_spec(spec): # check if overridden attribute
return True
parent_name = spec.parent.name
if parent_name is None:
parent_name = spec.parent.data_type
if isinstance(spec.parent, DatasetSpec):
if (parent_name in self.__datasets and self.is_overridden_dataset(parent_name) and
self.__datasets[parent_name].is_overridden_spec(spec)):
return True
else:
parent_name = spec.parent.name
if parent_name is None:
parent_name = spec.parent.data_type
if isinstance(spec.parent, DatasetSpec):
if parent_name in self.__datasets:
if self.is_overridden_dataset(parent_name):
if self.__datasets[parent_name].is_overridden_spec(spec):
return True
else:
if parent_name in self.__groups:
if self.is_overridden_group(parent_name):
if self.__groups[parent_name].is_overridden_spec(spec):
return True
if (parent_name in self.__groups and self.is_overridden_group(parent_name) and
self.__groups[parent_name].is_overridden_spec(spec)):
return True
return False

@docval({'name': 'spec', 'type': (BaseStorageSpec, str), 'doc': 'the specification to check'})
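
Besides dropping the now-unneeded `# noqa: C901` on `is_overridden_spec`, the spec.py hunk above flattens the body: the `else: if ...` pair becomes an `elif`, and each four-deep ladder of membership and override checks collapses into a single `if` whose conditions are joined with `and`. Because `and` short-circuits left to right, the later predicates still run only when the earlier ones succeed, so behavior is unchanged while the nesting disappears. A minimal sketch of the pattern, with hypothetical names rather than the hdmf classes:

    # Hypothetical stand-ins for the container / predicate pair used above.
    datasets = {"trials": object()}

    def is_overridden(name):
        return True

    def overrides_parent(name):
        return True

    # Before: one nested `if` per check.
    def check_nested(name):
        if name in datasets:
            if is_overridden(name):
                if overrides_parent(name):
                    return True
        return False

    # After: the same checks joined with `and`; short-circuiting preserves
    # the order of evaluation, so this is behaviorally identical.
    def check_flat(name):
        if name in datasets and is_overridden(name) and overrides_parent(name):
            return True
        return False

    assert check_nested("trials") == check_flat("trials")
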
src/hdmf/validate/validator.py (4 changes: 2 additions & 2 deletions)
@@ -472,9 +472,9 @@ class GroupValidator(BaseStorageValidator):
def __init__(self, **kwargs):
super().__init__(**kwargs)

@docval({"name": "builder", "type": GroupBuilder, "doc": "the builder to validate"}, # noqa: C901
@docval({"name": "builder", "type": GroupBuilder, "doc": "the builder to validate"},
returns='a list of Errors', rtype=list)
def validate(self, **kwargs): # noqa: C901
def validate(self, **kwargs):
builder = getargs('builder', kwargs)
errors = super().validate(builder)
errors.extend(self.__validate_children(builder))
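
Nearly every signature touched by this commit is wrapped in hdmf's `@docval` decorator, and the bodies read their arguments back with `getargs`, as `GroupValidator.validate` does above. A minimal usage sketch of that pattern, with a hypothetical class and argument names (this shows how the decorator is typically consumed; it is not code from this commit):

    from hdmf.utils import docval, getargs

    class Greeter:
        @docval({'name': 'name', 'type': str, 'doc': 'who to greet'},
                {'name': 'times', 'type': int, 'doc': 'how many greetings', 'default': 1})
        def greet(self, **kwargs):
            # docval validates types and fills defaults; getargs unpacks the
            # values from kwargs in the order they are requested.
            name, times = getargs('name', 'times', kwargs)
            return [f"hello, {name}"] * times

    print(Greeter().greet(name='world', times=2))
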
