Skip to content

Commit

Permalink
allow dtype-like arguments to from_array_props, and disallow auto as an array order (#43)
Browse files Browse the repository at this point in the history
  • Loading branch information
d-v-b authored Aug 30, 2024
1 parent 5d90ef1 commit 06dc31c
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 49 deletions.
4 changes: 2 additions & 2 deletions src/pydantic_ome_ngff/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
if TYPE_CHECKING:
from collections.abc import Hashable, Iterable
from typing import Any

from zarr.storage import BaseStore


Expand Down Expand Up @@ -49,5 +50,4 @@ def get_path(store: BaseStore) -> str:
if hasattr(store, "path"):
return store.path

else:
return ""
return ""
10 changes: 6 additions & 4 deletions src/pydantic_ome_ngff/v04/multiscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from typing_extensions import Literal, deprecated

if TYPE_CHECKING:
import numpy.typing as npt
from numcodecs.abc import Codec
from typing_extensions import Self

Expand Down Expand Up @@ -439,7 +440,7 @@ def from_arrays(
@classmethod
def from_array_props(
cls,
dtype: np.dtype[Any],
dtype: npt.DTypeLike,
shapes: Sequence[Sequence[int]],
paths: Sequence[str],
axes: Sequence[Axis],
Expand All @@ -453,7 +454,7 @@ def from_array_props(
| Literal["auto"] = "auto",
compressor: Codec = DEFAULT_COMPRESSOR,
fill_value: Any = 0,
order: Literal["C", "F", "auto"] = "auto",
order: Literal["C", "F"] = "C",
) -> Self:
"""
Create a `MultiscaleGroup` from a dtype and a sequence of shapes.
Expand Down Expand Up @@ -493,10 +494,12 @@ def from_array_props(
The memory layout used for chunks of Zarr arrays. The default is "C".
"""

dtype_normalized = np.dtype(dtype)

chunks_normalized = normalize_chunks(
chunks,
shapes=tuple(tuple(s) for s in shapes),
typesizes=tuple(dtype.itemsize for s in shapes),
typesizes=tuple(dtype_normalized.itemsize for s in shapes),
)

members_flat = {
Expand Down Expand Up @@ -528,7 +531,6 @@ def from_array_props(
members=GroupSpec.from_flat(members_flat).members,
attributes=MultiscaleGroupAttrs(multiscales=(multimeta,)),
)
return cls()

@model_validator(mode="after")
def check_arrays_exist(self) -> MultiscaleGroup:
Expand Down
4 changes: 2 additions & 2 deletions tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import numpy as np

if TYPE_CHECKING:
from typing import Any, List
from typing import Any

import pytest

Expand All @@ -15,7 +15,7 @@
@pytest.mark.parametrize(
    "data", [[0], [0, 1, 1, 1, 2], ["a", "a", "b", "b", "c", "c", "d"]]
)
def test_duplicates(data: list[Any]) -> None:
    """For every element reported by `duplicates`, the reported value must
    equal that element's occurrence count in the input sequence."""
    for element, reported_count in duplicates(data).items():
        assert reported_count == data.count(element)
Expand Down
56 changes: 20 additions & 36 deletions tests/v04/test_multiscales.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
if TYPE_CHECKING:
from typing import Literal

import numpy.typing as npt
from zarr.storage import FSStore, MemoryStore, NestedDirectoryStore

import operator
Expand Down Expand Up @@ -278,14 +279,6 @@ def test_multiscale_group_datasets_exist(
ValidationError,
match="array with that name was found in the hierarchy",
):
bad_items = {
d.path + "x": ArraySpec(
shape=(1, 1, 1, 1),
dtype="uint8",
chunks=(1, 1, 1, 1),
)
for d in default_multiscale.datasets
}
MultiscaleGroup(attributes=group_attrs, members=bad_items)


Expand All @@ -312,15 +305,6 @@ def test_multiscale_group_datasets_rank(default_multiscale: MultiscaleMetadata)
}
match = "Transform dimensionality must match array dimensionality."
with pytest.raises(ValidationError, match=match):
# arrays with varying rank
bad_items = {
d.path: ArraySpec(
shape=(1,) * (idx + 1),
dtype="uint8",
chunks=(1,) * (idx + 1),
)
for idx, d in enumerate(default_multiscale.datasets)
}
MultiscaleGroup(attributes=group_attrs, members=bad_items)

# arrays with rank that doesn't match the transform
Expand Down Expand Up @@ -434,27 +418,30 @@ def test_from_arrays(

@pytest.mark.parametrize("name", [None, "foo"])
@pytest.mark.parametrize("type", [None, "foo"])
@pytest.mark.parametrize("dtype", ["uint8", np.uint8])
@pytest.mark.parametrize("path_pattern", ["{0}", "s{0}", "foo/{0}"])
@pytest.mark.parametrize("metadata", [None, {"foo": 10}])
@pytest.mark.parametrize("ndim", [2, 3, 4, 5])
@pytest.mark.parametrize("chunks", ["auto", "tuple", "tuple-of-tuple"])
@pytest.mark.parametrize("order", ["C", "F"])
def test_from_array_props(
name: str | None,
dtype: npt.DTypeLike,
type: str | None,
path_pattern: str,
metadata: dict[str, int] | None,
ndim: int,
chunks: Literal["auto", "tuple", "tuple-of-tuple"],
order: Literal["auto", "C", "F"],
order: Literal["C", "F"],
) -> None:
arrays = tuple(np.arange(x**ndim).reshape((x,) * ndim) for x in [3, 2, 1])
paths = tuple(path_pattern.format(idx) for idx in range(len(arrays)))
scales = tuple((2**idx,) * ndim for idx in range(len(arrays)))
shapes = tuple((x,) * ndim for x in [3, 2, 1])
dtype_normalized = np.dtype(dtype)
paths = tuple(path_pattern.format(idx) for idx in range(len(shapes)))
scales = tuple((2**idx,) * ndim for idx in range(len(shapes)))
translations = tuple(
(t,) * ndim
for t in accumulate(
[(2 ** (idx - 1)) for idx in range(len(arrays))], operator.add
[(2 ** (idx - 1)) for idx in range(len(shapes))], operator.add
)
)

Expand All @@ -478,24 +465,21 @@ def test_from_array_props(
chunks_arg: tuple[tuple[int, ...], ...] | tuple[int, ...] | Literal["auto"]
if chunks == "auto":
chunks_arg = chunks
chunks_expected = (
guess_chunks(arrays[0].shape, arrays[0].dtype.itemsize),
) * len(arrays)
chunks_expected = (guess_chunks(shapes[0], dtype_normalized.itemsize),) * len(
shapes
)
elif chunks == "tuple":
chunks_arg = (2,) * ndim
chunks_expected = (chunks_arg,) * len(arrays)
chunks_expected = (chunks_arg,) * len(shapes)
elif chunks == "tuple-of-tuple":
chunks_arg = tuple((idx,) * ndim for idx in range(1, len(arrays) + 1))
chunks_arg = tuple((idx,) * ndim for idx in range(1, len(shapes) + 1))
chunks_expected = chunks_arg

if order == "auto":
order_expected = "C"
else:
order_expected = order
order_expected = order

group = MultiscaleGroup.from_array_props(
dtype=arrays[0].dtype,
shapes=tuple(a.shape for a in arrays),
dtype=dtype,
shapes=shapes,
paths=paths,
axes=axes,
scales=scales,
Expand All @@ -514,11 +498,11 @@ def test_from_array_props(
assert group.attributes.multiscales[0].metadata == metadata
assert group.attributes.multiscales[0].coordinateTransformations is None
assert group.attributes.multiscales[0].axes == tuple(axes)
for idx, array in enumerate(arrays):
for idx, shape in enumerate(shapes):
array_model: ArraySpec = group_flat["/" + paths[idx]]
assert array_model.order == order_expected
assert array.shape == array_model.shape
assert array.dtype == array_model.dtype
assert shape == array_model.shape
assert dtype_normalized == array_model.dtype
assert chunks_expected[idx] == array_model.chunks
assert group.attributes.multiscales[0].datasets[
idx
Expand Down
7 changes: 2 additions & 5 deletions tests/v04/test_transform.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
from __future__ import annotations

from typing import Tuple, Type

import pytest

from pydantic_ome_ngff.v04.transform import (
Expand All @@ -24,7 +21,7 @@
),
)
def test_scale_translation(
scale: Tuple[int, ...], translation: Tuple[int, ...]
scale: tuple[int, ...], translation: tuple[int, ...]
) -> None:
if len(scale) == len(translation):
result = scale_translation(scale=scale, translation=translation)
Expand Down Expand Up @@ -70,7 +67,7 @@ def test_ensure_dimensionality(
@pytest.mark.parametrize("num_dims", ((1, 3, 5)))
@pytest.mark.parametrize("transform", [VectorTranslation, VectorScale])
def test_ndim(
num_dims: int, transform: Type[VectorTranslation] | Type[VectorScale]
num_dims: int, transform: type[VectorTranslation] | type[VectorScale]
) -> None:
if transform == VectorScale:
params = {"scale": (1,) * num_dims}
Expand Down

0 comments on commit 06dc31c

Please sign in to comment.