Commit

Merge pull request #11 from BrainLesion/hendrik_dev
Hendrik dev
neuronflow authored Nov 10, 2023
2 parents dd51f79 + 05bc463 commit ae35c33
Showing 20 changed files with 1,211 additions and 548 deletions.
15 changes: 9 additions & 6 deletions examples/example_cfos_3d.py
@@ -1,6 +1,6 @@
 from auxiliary.nifti.io import read_nifti

-from panoptica import CCABackend, SemanticSegmentationEvaluator
+from panoptica import SemanticPair, Panoptic_Evaluator, ConnectedComponentsInstanceApproximator, CCABackend, NaiveOneToOneMatching

 pred_masks = read_nifti(
     input_nifti_path="/home/florian/flow/cfos_analysis/data/ablation/2021-11-25_23-50-56_2021-10-25_19-38-31_tr_dice_bce_11/patchvolume_695_2.nii.gz"
@@ -9,11 +9,14 @@
     input_nifti_path="/home/florian/flow/cfos_analysis/data/reference/patchvolume_695_2/patchvolume_695_2_binary.nii.gz",
 )

-eva = SemanticSegmentationEvaluator(cca_backend=CCABackend.cc3d)
-res = eva.evaluate(
-    reference_mask=ref_masks,
-    prediction_mask=pred_masks,
+sample = SemanticPair(pred_masks, ref_masks)
+
+evaluator = Panoptic_Evaluator(
+    expected_input=SemanticPair,
+    instance_approximator=ConnectedComponentsInstanceApproximator(cca_backend=CCABackend.cc3d),
+    instance_matcher=NaiveOneToOneMatching(),
+    iou_threshold=0.5,
 )

-print(res)
+result, debug_data = evaluator.evaluate(sample)
+print(result)
24 changes: 11 additions & 13 deletions examples/example_spine_instance.py
@@ -1,21 +1,19 @@
 from auxiliary.nifti.io import read_nifti
 import numpy as np

-from panoptica import InstanceSegmentationEvaluator
+from panoptica import MatchedInstancePair, Panoptic_Evaluator, NaiveOneToOneMatching

-ref_masks = read_nifti(
-    "examples/spine_seg/instance_example/sub-0007_mod-T2w_seg-vert_msk.nii.gz"
-)
-pred_masks = read_nifti(
-    "examples/spine_seg/instance_example/sub-0007_mod-T2w_seg-vert_msk_new.nii.gz"
-)
+ref_masks = read_nifti("repo/examples/spine_seg/instance_example/sub-0007_mod-T2w_seg-vert_msk.nii.gz")
+pred_masks = read_nifti("repo/examples/spine_seg/instance_example/sub-0007_mod-T2w_seg-vert_msk_new.nii.gz")

-eva = InstanceSegmentationEvaluator()
+sample = MatchedInstancePair(prediction_arr=pred_masks, reference_arr=ref_masks)

-res = eva.evaluate(
-    reference_mask=ref_masks,
-    prediction_mask=pred_masks,
+evaluator = Panoptic_Evaluator(
+    expected_input=MatchedInstancePair,
+    instance_approximator=None,
+    instance_matcher=NaiveOneToOneMatching(),
+    iou_threshold=0.5,
 )

-print(res)
+result, debug_data = evaluator.evaluate(sample)
+print(result)
23 changes: 11 additions & 12 deletions examples/example_spine_semantic.py
@@ -1,20 +1,19 @@
 from auxiliary.nifti.io import read_nifti

-from panoptica import CCABackend, SemanticSegmentationEvaluator
+from panoptica import SemanticPair, Panoptic_Evaluator, ConnectedComponentsInstanceApproximator, CCABackend, NaiveOneToOneMatching

-ref_masks = read_nifti(
-    "examples/spine_seg/semantic_example/sub-0007_mod-T2w_seg-spine_msk.nii.gz"
-)
-pred_masks = read_nifti(
-    "examples/spine_seg/semantic_example/sub-0007_mod-T2w_seg-spine_msk_new.nii.gz"
-)
+ref_masks = read_nifti("examples/spine_seg/semantic_example/sub-0007_mod-T2w_seg-spine_msk.nii.gz")
+pred_masks = read_nifti("examples/spine_seg/semantic_example/sub-0007_mod-T2w_seg-spine_msk_new.nii.gz")

-eva = SemanticSegmentationEvaluator(cca_backend=CCABackend.cc3d)
-res = eva.evaluate(
-    reference_mask=ref_masks,
-    prediction_mask=pred_masks,
+sample = SemanticPair(pred_masks, ref_masks)
+
+evaluator = Panoptic_Evaluator(
+    expected_input=SemanticPair,
+    instance_approximator=ConnectedComponentsInstanceApproximator(cca_backend=CCABackend.cc3d),
+    instance_matcher=NaiveOneToOneMatching(),
+    iou_threshold=0.5,
 )

-print(res)
+result, debug_data = evaluator.evaluate(sample)
+print(result)
12 changes: 5 additions & 7 deletions panoptica/__init__.py
@@ -1,7 +1,5 @@
-from panoptica.instance_evaluation.instance_evaluator import (
-    InstanceSegmentationEvaluator,
-)
-from panoptica.semantic_evaluation.connected_component_backends import CCABackend
-from panoptica.semantic_evaluation.semantic_evaluator import (
-    SemanticSegmentationEvaluator,
-)
+from panoptica.instance_approximator import ConnectedComponentsInstanceApproximator, CCABackend
+from panoptica.instance_matcher import NaiveOneToOneMatching
+from panoptica.evaluator import Panoptic_Evaluator
+from panoptica.result import PanopticaResult
+from panoptica.utils.datatypes import SemanticPair, UnmatchedInstancePair, MatchedInstancePair
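
Because the example scripts above read NIfTI files from local paths, here is a minimal, self-contained sketch of the same semantic pipeline using only the names exported by the new __init__.py. The random masks, their shape, and the assumption that the evaluator accepts plain binary numpy arrays are illustrative and not part of the commit; the cc3d backend requires the optional cc3d package (CCABackend.scipy appears as an alternative in _functionals.py below).

import numpy as np

from panoptica import (
    CCABackend,
    ConnectedComponentsInstanceApproximator,
    NaiveOneToOneMatching,
    Panoptic_Evaluator,
    SemanticPair,
)

# Synthetic binary masks standing in for the read_nifti(...) outputs in the examples.
rng = np.random.default_rng(0)
ref_masks = (rng.random((32, 32, 32)) > 0.7).astype(np.uint8)
pred_masks = (rng.random((32, 32, 32)) > 0.7).astype(np.uint8)

# Same call pattern as the updated example scripts: wrap the pair, configure the evaluator, evaluate.
sample = SemanticPair(pred_masks, ref_masks)

evaluator = Panoptic_Evaluator(
    expected_input=SemanticPair,
    instance_approximator=ConnectedComponentsInstanceApproximator(cca_backend=CCABackend.cc3d),
    instance_matcher=NaiveOneToOneMatching(),
    iou_threshold=0.5,
)

result, debug_data = evaluator.evaluate(sample)
print(result)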
91 changes: 91 additions & 0 deletions panoptica/_functionals.py
@@ -0,0 +1,91 @@
import numpy as np
from multiprocessing import Pool
from panoptica.utils.metrics import _compute_instance_iou
from panoptica.utils.constants import CCABackend


def _calc_iou_matrix(prediction_arr: np.ndarray, reference_arr: np.ndarray, ref_labels: list[int], pred_labels: list[int]):
    """
    Calculate the Intersection over Union (IoU) matrix between reference and prediction arrays.

    Args:
        prediction_arr (np.ndarray): Numpy array containing the prediction labels.
        reference_arr (np.ndarray): Numpy array containing the reference labels.
        ref_labels (list[int]): List of unique reference labels.
        pred_labels (list[int]): List of unique prediction labels.

    Returns:
        np.ndarray: IoU matrix where each element represents the IoU between a reference and prediction instance.

    Example:
    >>> _calc_iou_matrix(np.array([1, 2, 3]), np.array([4, 5, 6]), [1, 2, 3], [4, 5, 6])
    array([[0. , 0. , 0. ],
           [0. , 0. , 0. ],
           [0. , 0. , 0. ]])
    """
    num_ref_instances = len(ref_labels)
    num_pred_instances = len(pred_labels)

    # Create a pool of worker processes to parallelize the computation
    with Pool() as pool:
        # Generate all possible pairs of instance indices for IoU computation
        instance_pairs = [(reference_arr, prediction_arr, ref_idx, pred_idx) for ref_idx in ref_labels for pred_idx in pred_labels]

        # Calculate IoU for all instance pairs in parallel using starmap
        iou_values = pool.starmap(_compute_instance_iou, instance_pairs)

    # Reshape the resulting IoU values into a matrix
    iou_matrix = np.array(iou_values).reshape((num_ref_instances, num_pred_instances))
    return iou_matrix


def _map_labels(arr: np.ndarray, label_map: dict[np.integer, np.integer]) -> np.ndarray:
    """
    Maps labels in the given array according to the label_map dictionary.

    Args:
        label_map (dict): A dictionary that maps the original label values (str or int) to the new label values (int).

    Returns:
        np.ndarray: Returns a copy of the remapped array
    """
    data = arr.copy()
    for v in np.unique(data):
        if v in label_map:  # int needed to match non-integer data-types
            data[arr == v] = label_map[v]
    return data


def _connected_components(
    array: np.ndarray,
    cca_backend: CCABackend,
) -> tuple[np.ndarray, int]:
    """
    Label connected components in a binary array using a specified connected components algorithm.

    Args:
        array (np.ndarray): Binary array containing connected components.
        cca_backend (CCABackend): Enum indicating the connected components algorithm backend (CCABackend.cc3d or CCABackend.scipy).

    Returns:
        tuple[np.ndarray, int]: A tuple containing the labeled array and the number of connected components.

    Raises:
        NotImplementedError: If the specified connected components algorithm backend is not implemented.

    Example:
    >>> _connected_components(np.array([[1, 0, 1], [0, 1, 1], [1, 0, 0]]), CCABackend.scipy)
    (array([[1, 0, 2], [0, 3, 3], [4, 0, 0]]), 4)
    """
    if cca_backend == CCABackend.cc3d:
        import cc3d

        cc_arr, n_instances = cc3d.connected_components(array, return_N=True)
    elif cca_backend == CCABackend.scipy:
        from scipy.ndimage import label

        cc_arr, n_instances = label(array)
    else:
        raise NotImplementedError(cca_backend)

    return cc_arr, n_instances
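
For reference, a quick sanity-check sketch (not part of the commit) that exercises the internal helpers above on the tiny array from the _connected_components docstring; it assumes this commit's panoptica package is importable and uses the SciPy backend so the optional cc3d dependency is not required.

import numpy as np

from panoptica._functionals import _connected_components, _map_labels
from panoptica.utils.constants import CCABackend

# The 3x3 binary toy input from the _connected_components docstring example.
binary = np.array([
    [1, 0, 1],
    [0, 1, 1],
    [1, 0, 0],
], dtype=np.uint8)

# Label the foreground regions with the SciPy backend.
cc_arr, n_instances = _connected_components(binary, CCABackend.scipy)
print(n_instances, np.unique(cc_arr))

# Remap instance label 1 to 10; labels missing from the dict are left unchanged.
remapped = _map_labels(cc_arr, {1: 10})
print(np.unique(remapped))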
