Commit

Merge pull request #20 from pattonw/image_stacks
ImageStackVolumes
aschampion authored Mar 1, 2019
2 parents 14602b8 + 7cfac66 commit 0533b79
Showing 2 changed files with 90 additions and 13 deletions.
57 changes: 44 additions & 13 deletions diluvian/volumes.py
@@ -1132,8 +1132,8 @@ class ImageStackVolume(Volume):
Coordinate Systems
----------
- World: physical coordinates, in nanometers, as used by CATMAID
- Pixel: pixel coordinates, starts at (0,0,0) and accounts for pixel resolution
+ Real: Physical coordinates, generally measured in nanometers
+ World: pixel coordinates, starts at (0,0,0) and accounts for pixel resolution
often (4x4x40) nanometers per pixel
Local: Downsampled pixel space
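
For orientation, here is a minimal sketch of how the renamed coordinate systems relate. The resolution, translation, and zoom values are made up, and the arithmetic simply mirrors the conversion methods further down in this diff.

import numpy as np

# Hypothetical stack parameters, given in (z, y, x) order as used after np.flipud.
orig_resolution = np.array([40.0, 4.0, 4.0])   # nm per pixel
translation = np.array([0.0, 0.0, 0.0])        # nm offset of the stack origin
scale = np.array([1, 2, 2])                    # assumed downsampling factor at zoom level 1

world = np.array([10, 100, 100])               # World: full-resolution pixel coordinates
real = world * orig_resolution + translation   # Real: physical nm, cf. world_coord_to_real
local = world // scale                         # Local: downsampled pixels, cf. world_coord_to_local

print(real)   # [400. 400. 400.]
print(local)  # [10 50 50]
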
@@ -1169,10 +1169,39 @@ def from_catmaid_stack(stack_info, tile_source_parameters):
}[tile_source_parameters['tile_source_type']].format(**tile_source_parameters)
bounds = np.flipud(np.array(stack_info['bounds'], dtype=np.int64))
resolution = np.flipud(np.array(stack_info['resolution']))
translation = np.flipud(np.array(stack_info['translation']))
tile_width = int(tile_source_parameters['tile_width'])
tile_height = int(tile_source_parameters['tile_height'])
- return ImageStackVolume(bounds, resolution, tile_width, tile_height, format_url,
-                         missing_z=stack_info['broken_slices'])
+ return ImageStackVolume(bounds, resolution, translation, tile_width, tile_height,
+                         format_url, missing_z=stack_info.get("broken_slices", None))

def from_toml(filename):
volumes = {}
with open(filename, "rb") as fin:
datasets = toml.load(fin).get("ImageStack", [])
for dataset in datasets:
# stack info
si = [
"bounds",
"resolution",
"translation",
"broken_slices",
]
# tile stack parameters
tsp = [
"source_base_url",
"file_extension",
"tile_width",
"tile_height",
"tile_source_type",
]
volume = ImageStackVolume.from_catmaid_stack(
{key: dataset[key] for key in si},
{key: dataset[key] for key in tsp},
)
volumes[dataset["title"]] = volume

return volumes

def __init__(self, bounds, orig_resolution, translation, tile_width, tile_height,
tile_format_url, zoom_level=0, missing_z=None, image_leaf_shape=None):
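
The new from_toml helper implies a config layout like the following. This is a hedged sketch only: the key names come from the si and tsp lists above plus the "title" lookup, the values are borrowed from the test further down, and the standard-library tomllib is used purely for illustration (the committed code calls the toml module it imports elsewhere).

import tomllib  # Python 3.11+ standard library; illustration only

example = """
[[ImageStack]]
title = "example_stack"
bounds = [28128, 31840, 4841]
resolution = [3.8, 3.8, 50]
translation = [0, 0, 0]
broken_slices = []
source_base_url = "https://neurocean.janelia.org/ssd-tiles-no-cache/0111-8/"
file_extension = "jpg"
tile_width = 512
tile_height = 512
tile_source_type = 4
"""

datasets = tomllib.loads(example)["ImageStack"]
print(datasets[0]["title"], datasets[0]["tile_source_type"])  # example_stack 4
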
@@ -1182,6 +1211,7 @@ def __init__(self, bounds, orig_resolution, translation, tile_width, tile_height
self.tile_width = tile_width
self.tile_height = tile_height
self.tile_format_url = tile_format_url
self.mask_data = None

self.zoom_level = int(zoom_level)
if missing_z is None:
@@ -1201,16 +1231,16 @@ def __init__(self, bounds, orig_resolution, translation, tile_width, tile_height
self.label_data = None

def local_coord_to_world(self, a):
- return self.pixel_coord_to_world(np.matmul(a, self.scale))
+ return np.multiply(a, self.scale)

def world_coord_to_local(self, a):
- return np.floor_divide(self.world_coord_to_pixel(a), self.scale)
+ return np.floor_divide(a, self.scale)

- def world_coord_to_pixel(self, a):
+ def real_coord_to_world(self, a):
return np.floor_divide(a - self.translation, self.orig_resolution)

- def pixel_coord_to_world(self, a):
-     return np.matmul(a, self.orig_resolution) + self.translation
+ def world_coord_to_real(self, a):
+     return np.multiply(a, self.orig_resolution) + self.translation

@property
def resolution(self):
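
One property of the renamed conversions worth noting: real_coord_to_world uses floor_divide, so converting Real to World snaps to pixel boundaries. A World to Real to World round trip is exact, but Real to World to Real generally is not. A small sketch with made-up resolution and translation:

import numpy as np

orig_resolution = np.array([40.0, 4.0, 4.0])   # hypothetical nm per pixel, (z, y, x)
translation = np.array([0.0, 0.0, 0.0])

real = np.array([401.0, 403.0, 399.0])                        # physical nm
world = np.floor_divide(real - translation, orig_resolution)  # [10. 100. 99.]
back = world * orig_resolution + translation                  # [400. 400. 396.]
print(world, back)
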
@@ -1223,6 +1253,7 @@ def downsample(self, resolution):
return ImageStackVolume(
self.orig_bounds,
self.orig_resolution,
self.translation,
self.tile_width,
self.tile_height,
self.tile_format_url,
@@ -1262,10 +1293,10 @@ def get_subvolume(self, bounds):

def image_populator(self, bounds):
image_subvol = np.zeros(tuple(bounds[1] - bounds[0]), dtype=np.float32)
- col_range = map(int, (math.floor(bounds[0][self.DIM.X]/self.tile_width),
-                       math.ceil(bounds[1][self.DIM.X]/self.tile_width)))
- row_range = map(int, (math.floor(bounds[0][self.DIM.Y]/self.tile_height),
-                       math.ceil(bounds[1][self.DIM.Y]/self.tile_height)))
+ col_range = list(map(int, (math.floor(bounds[0][self.DIM.X] / self.tile_width),
+                            math.ceil(bounds[1][self.DIM.X] / self.tile_width))))
+ row_range = list(map(int, (math.floor(bounds[0][self.DIM.Y] / self.tile_height),
+                            math.ceil(bounds[1][self.DIM.Y] / self.tile_height))))
tile_size = np.array([1, self.tile_height, self.tile_width]).astype(np.int64)
for z in xrange(bounds[0][self.DIM.Z], bounds[1][self.DIM.Z]):
if z in self.missing_z:
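
The list(...) wrappers added here matter under Python 3, where map returns a one-shot iterator rather than a reusable sequence; presumably the rest of the method reuses these ranges across the z loop. The arithmetic itself just turns pixel bounds into tile indices. A rough worked example with 512-pixel tiles and made-up bounds, assuming DIM.Y and DIM.X index the y and x axes of the (z, y, x) bounds:

import math

tile_width = tile_height = 512
bounds = ([3, 1000, 700], [5, 1600, 1400])   # hypothetical (z, y, x) pixel bounds

col_range = [math.floor(bounds[0][2] / tile_width), math.ceil(bounds[1][2] / tile_width)]
row_range = [math.floor(bounds[0][1] / tile_height), math.ceil(bounds[1][1] / tile_height)]
print(col_range, row_range)  # [1, 3] [1, 4] -> fetch tile columns 1-2 and rows 1-3
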
46 changes: 46 additions & 0 deletions tests/test_diluvian.py
@@ -135,6 +135,52 @@ def test_volume_transforms():
np.testing.assert_array_equal(dpsv.image, sv.image.reshape((1, 4, 1, 4, 1, 1)).mean(5).mean(3).mean(1))


def test_volume_transforms_image_stacks():
# stack info
si = {
"bounds": [28128, 31840, 4841],
"resolution": [3.8, 3.8, 50],
"tile_width": 512,
"tile_height": 512,
"translation": [0, 0, 0],
}
# tile stack parameters
tsp = {
"source_base_url": "https://neurocean.janelia.org/ssd-tiles-no-cache/0111-8/",
"file_extension": "jpg",
"tile_width": 512,
"tile_height": 512,
"tile_source_type": 4,
}
v = volumes.ImageStackVolume.from_catmaid_stack(si, tsp)
pv = v.partition(
[2, 1, 1], [1, 0, 0]
) # Note axes are flipped after volume initialization
dpv = pv.downsample((50, 15.2, 15.2))

np.testing.assert_array_equal(
dpv.local_coord_to_world(np.array([2, 2, 2])), np.array([2422, 8, 8])
)
np.testing.assert_array_equal(
dpv.world_coord_to_local(np.array([2422, 8, 8])), np.array([2, 2, 2])
)

svb = volumes.SubvolumeBounds(
np.array((2420, 0, 0), dtype=np.int64),
np.array((2421, 4, 4), dtype=np.int64),
)
sv = v.get_subvolume(svb)

dpsvb = volumes.SubvolumeBounds(
np.array((0, 0, 0), dtype=np.int64), np.array((1, 1, 1), dtype=np.int64)
)
dpsv = dpv.get_subvolume(dpsvb)

np.testing.assert_array_equal(
dpsv.image, sv.image.reshape((1, 4, 1, 4, 1, 1)).mean(5).mean(3).mean(1)
)
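
For reference, the expected values in the first two assertions can be reproduced by hand, assuming the second z partition starts at the floor of half the z extent and that downsampling from 3.8 nm to 15.2 nm gives a factor-4 scale in y and x (and 1 in z at 50 nm):

import numpy as np

partition_offset = np.array([4841 // 2, 0, 0])   # second z partition -> 2420
scale = np.array([1, 4, 4])                      # 50/50, 15.2/3.8, 15.2/3.8

local = np.array([2, 2, 2])
world = local * scale + partition_offset         # [2422 8 8]
print(world)
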


def test_volume_identity_downsample_returns_self():
resolution = (27, 185, 90)
v = volumes.Volume(resolution, image_data=np.zeros((1, 1, 1)), label_data=np.zeros((1, 1, 1)))
