diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a5358812..74e048a19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,10 @@ # HDMF Changelog -## HDMF 3.14.2 (???) +## HDMF 3.14.2 (Upcoming) ### Enhancements - Warn when unexpected keys are present in specs. @rly [#1134](https://github.com/hdmf-dev/hdmf/pull/1134) +- Support appending to zarr arrays. @mavaylon1 [#1136](https://github.com/hdmf-dev/hdmf/pull/1136) ### Bug fixes - Fix iterator increment causing an extra +1 added after the end of completion. @CodyCBakerPhD [#1128](https://github.com/hdmf-dev/hdmf/pull/1128) diff --git a/pyproject.toml b/pyproject.toml index 3a0034087..a089113c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,12 +36,12 @@ dependencies = [ "pandas>=1.0.5", "ruamel.yaml>=0.16", "scipy>=1.4", + "zarr>=2.12.0", "importlib-resources; python_version < '3.9'", # TODO: remove when minimum python version is 3.9 ] dynamic = ["version"] [project.optional-dependencies] -zarr = ["zarr>=2.12.0"] tqdm = ["tqdm>=4.41.0"] termset = ["linkml-runtime>=1.5.5; python_version >= '3.9'", "schemasheets>=0.1.23; python_version >= '3.9'", @@ -117,7 +117,7 @@ omit = [ # force-exclude = "src/hdmf/common/hdmf-common-schema|docs/gallery" [tool.ruff] -select = ["E", "F", "T100", "T201", "T203"] +lint.select = ["E", "F", "T100", "T201", "T203"] exclude = [ ".git", ".tox", @@ -132,11 +132,11 @@ exclude = [ ] line-length = 120 -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "docs/gallery/*" = ["E402", "T201"] "src/*/__init__.py" = ["F401"] "setup.py" = ["T201"] "test_gallery.py" = ["T201"] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 17 diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py index 0604881bb..8135d75e7 100644 --- a/src/hdmf/backends/hdf5/h5tools.py +++ b/src/hdmf/backends/hdf5/h5tools.py @@ -728,7 +728,7 @@ def __read_dataset(self, h5obj, name=None): def _check_str_dtype(self, h5obj): dtype = h5obj.dtype if dtype.kind == 'O': - if 
dtype.metadata.get('vlen') == str and H5PY_3: + if dtype.metadata.get('vlen') is str and H5PY_3: return StrDataset(h5obj, None) return h5obj diff --git a/src/hdmf/build/objectmapper.py b/src/hdmf/build/objectmapper.py index fed678d41..52fbfec49 100644 --- a/src/hdmf/build/objectmapper.py +++ b/src/hdmf/build/objectmapper.py @@ -1164,7 +1164,7 @@ def __get_subspec_values(self, builder, spec, manager): if not isinstance(builder, DatasetBuilder): # pragma: no cover raise ValueError("__get_subspec_values - must pass DatasetBuilder with DatasetSpec") if (spec.shape is None and getattr(builder.data, 'shape', None) == (1,) and - type(builder.data[0]) != np.void): + type(builder.data[0]) is not np.void): # if a scalar dataset is expected and a 1-element non-compound dataset is given, then read the dataset builder['data'] = builder.data[0] # use dictionary reference instead of .data to bypass error ret[spec] = self.__check_ref_resolver(builder.data) diff --git a/src/hdmf/data_utils.py b/src/hdmf/data_utils.py index f4ac6541f..71f5bdf6d 100644 --- a/src/hdmf/data_utils.py +++ b/src/hdmf/data_utils.py @@ -5,17 +5,20 @@ from warnings import warn from typing import Tuple from itertools import product, chain +from zarr import Array as ZarrArray import h5py import numpy as np from .utils import docval, getargs, popargs, docval_macro, get_data_shape - def append_data(data, arg): if isinstance(data, (list, DataIO)): data.append(arg) return data + elif isinstance(data, ZarrArray): + data.append([arg], axis=0) + return data elif type(data).__name__ == 'TermSetWrapper': # circular import data.append(arg) return data diff --git a/src/hdmf/testing/testcase.py b/src/hdmf/testing/testcase.py index 798df6fe4..1be4bcecd 100644 --- a/src/hdmf/testing/testcase.py +++ b/src/hdmf/testing/testcase.py @@ -174,7 +174,7 @@ def _assert_array_equal(self, :param message: custom additional message to show when assertions as part of this assert are failing """ array_data_types = tuple([i for i in 
get_docval_macro('array_data') - if (i != list and i != tuple and i != AbstractDataChunkIterator)]) + if (i is not list and i is not tuple and i is not AbstractDataChunkIterator)]) # We construct array_data_types this way to avoid explicit dependency on h5py, Zarr and other # I/O backends. Only list and tuple do not support [()] slicing, and AbstractDataChunkIterator # should never occur here. The effective value of array_data_types is then: diff --git a/src/hdmf/validate/validator.py b/src/hdmf/validate/validator.py index bdfc15f8f..e39011d9f 100644 --- a/src/hdmf/validate/validator.py +++ b/src/hdmf/validate/validator.py @@ -164,7 +164,7 @@ def get_type(data, builder_dtype=None): # Empty array else: # Empty string array - if data.dtype.metadata["vlen"] == str: + if data.dtype.metadata["vlen"] is str: return "utf", None # Undetermined variable length data type. else: # pragma: no cover diff --git a/tests/unit/utils_test/test_data_utils.py b/tests/unit/utils_test/test_data_utils.py new file mode 100644 index 000000000..2e0df7ba8 --- /dev/null +++ b/tests/unit/utils_test/test_data_utils.py @@ -0,0 +1,14 @@ +from hdmf.data_utils import append_data +from hdmf.testing import TestCase + +import numpy as np +from numpy.testing import assert_array_equal +import zarr + +class TestAppendData(TestCase): + + def test_append_data_zarr(self): + zarr_array = zarr.array([1,2,3]) + new = append_data(zarr_array, 4) + + assert_array_equal(new[:], np.array([1,2,3,4]))