forked from bunkerhillhealth/bunkerhill-sdk
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmodel.py
85 lines (64 loc) · 3.34 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
"""The class definition and model server entrypoint for the MSD Hippocampus model."""
import subprocess
from typing import Dict
import numpy as np
from bunkerhill import nnunet_wrapper
from bunkerhill.base_model import BaseModel
from bunkerhill.bunkerhill_types import Outputs, SeriesInstanceUID
from bunkerhill.nnunet_wrapper import NNUNetPaths
from bunkerhill.model_runner import ModelRunner
class MSDHippocampusModel(BaseModel):
    """Wrapper exposing the pretrained nnUNet MSD Hippocampus model to the pipeline.

    Inference is driven entirely through the nnUNet command-line tools: the
    pretrained weights are installed once at construction time, and each call to
    :meth:`inference` runs ``nnUNet_predict`` on a single DICOM series.

    For background on the Medical Segmentation Decathlon and the Hippocampus
    dataset, see: http://medicaldecathlon.com/

    The model weights are downloaded from:
    https://zenodo.org/record/4003545/files/Task004_Hippocampus.zip

    Attributes:
        _paths: The paths required by nnUNet to run inference.
    """
    _DATA_DIRNAME: str = '/data'
    _TASK_NAME: str = 'Task004_Hippocampus'
    _PRETRAINED_MODEL_FILENAME: str = f'/app/{_TASK_NAME}.zip'
    _SEGMENTATION_OUTPUT_ATTRIBUTE_NAME: str = 'hippocampus_seg_pred'
    _SOFTMAX_OUTPUT_ATTRIBUTE_NAME: str = 'hippocampus_softmax_pred'
    _LOAD_WEIGHTS_COMMAND: str = 'nnUNet_install_pretrained_model_from_zip'
    _INFERENCE_COMMAND: str = 'nnUNet_predict'
    _SAVE_SOFTMAX_FLAG: str = '--save_npz'

    _paths: NNUNetPaths

    def __init__(self):
        self._paths = nnunet_wrapper.setup_paths(self._DATA_DIRNAME, self._TASK_NAME)
        # The pretrained Hippocampus weights are bundled into the Docker image
        # (see Dockerfile) as a .zip; install them via the nnUNet CLI tool.
        install_cmd = [self._LOAD_WEIGHTS_COMMAND, self._PRETRAINED_MODEL_FILENAME]
        subprocess.check_call(install_cmd, timeout=300)

    def inference(self, pixel_array: Dict[SeriesInstanceUID, np.ndarray]) -> Outputs:
        """Runs inference on the pixel array for a DICOM series.

        Args:
            pixel_array: A dict mapping the DICOM series UID to its pixel array.

        Returns:
            A dictionary containing the output segmentation and softmax ndarrays.
        """
        uid, voxels = next(iter(pixel_array.items()))

        # Convert Bunkerhill pipeline's model arguments into format expected by nnUNet.
        nnunet_wrapper.dump_pixel_array(voxels, self._paths.test_data_dirname)

        # Invoke the nnUNet_predict CLI; the --save_npz flag makes it emit the
        # softmax tensor alongside the segmentation.
        predict_cmd = [self._INFERENCE_COMMAND]
        predict_cmd += ['-i', self._paths.test_data_dirname]
        predict_cmd += ['-o', self._paths.output_dirname]
        predict_cmd += ['-t', self._TASK_NAME]
        predict_cmd += ['-m', '3d_fullres']
        predict_cmd.append(self._SAVE_SOFTMAX_FLAG)
        subprocess.check_call(predict_cmd, timeout=300)

        # Convert nnUNet segmentation and softmax tensors into output attributes.
        seg_attr = nnunet_wrapper.load_segmentation(
            self._paths.output_dirname, self._SEGMENTATION_OUTPUT_ATTRIBUTE_NAME, uid
        )
        softmax_attr = nnunet_wrapper.load_softmax(
            self._paths.output_dirname, self._SOFTMAX_OUTPUT_ATTRIBUTE_NAME, uid
        )
        merged: Outputs = dict(seg_attr)
        merged.update(softmax_attr)
        return merged
if __name__ == '__main__':
    # Construct the model (installs pretrained weights) and hand it to the
    # runner, which blocks processing inference requests until shutdown.
    runner = ModelRunner(MSDHippocampusModel())
    runner.start_run_loop()