
Add readme.
Renamed several variables.
ChenglongWang committed Jul 18, 2022
1 parent 671906b commit 29524e9
Showing 15 changed files with 49 additions and 499 deletions.
4 changes: 2 additions & 2 deletions Scripts/prepare_label-mps.py
@@ -11,8 +11,8 @@
 error_json = Path(r"\\mega\homesall\clwang\Data\jsph_lung\YHBLXA_YXJB\error_dict.json")
 SAVE_PATH = Path(r'\\mega\yliu\Data\pn_cls_data\MPS')
 
-refer_train_path = Path(r"\\mega\yliu\Data\medlp_exp\classification\jsph_mps\resnet50-BCE-BN-sgd-plateau-0622_1914-lr0.1-CTRList\train_files.yml")
-refer_valid_path = Path(r"\\mega\yliu\Data\medlp_exp\classification\jsph_mps\resnet50-BCE-BN-sgd-plateau-0622_1914-lr0.1-CTRList\valid_files.yml")
+refer_train_path = Path(r"\\mega\yliu\Data\strix_exp\classification\jsph_mps\resnet50-BCE-BN-sgd-plateau-0622_1914-lr0.1-CTRList\train_files.yml")
+refer_valid_path = Path(r"\\mega\yliu\Data\strix_exp\classification\jsph_mps\resnet50-BCE-BN-sgd-plateau-0622_1914-lr0.1-CTRList\valid_files.yml")
 
 _FEATURE_KEY = ['lobulation', 'spiculation', 'Relation to bronchus', 'Relation to Vessel']
 FEATURE_KEY = [
2 changes: 1 addition & 1 deletion Scripts/prepare_label_mvi_MB.py
@@ -4,7 +4,7 @@
 import nibabel as nib
 from ..utils import check_dir, get_items_from_file
 from tqdm import tqdm
-from medlp.data_io.base_dataset.classification_dataset import BasicClassificationDataset
+from strix.data_io.base_dataset.classification_dataset import BasicClassificationDataset
 from monai.data import CacheDataset, PersistentDataset, Dataset
 from monai_ex.transforms import *
 
2 changes: 1 addition & 1 deletion datasets/__init__.py
@@ -1,4 +1,4 @@
-from medlp.utilities.registry import DatasetRegistry
+from strix.utilities.registry import DatasetRegistry
 
 CLASSIFICATION_DATASETS = DatasetRegistry()
 SEGMENTATION_DATASETS = DatasetRegistry()
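For orientation, a minimal sketch of how these two registries are consumed elsewhere in this commit; it assumes only the nested-mapping lookup pattern visible in main.py and test.py below (the `DatasetRegistry` class itself lives in `strix.utilities.registry`, and its registration API is not shown here).

```python
# Sketch based on the lookups in main.py / test.py below: the registry is indexed
# first by dimensionality ("2D"/"3D"), then by dataset name, and the dataset
# constructor is stored under the 'FN' key. Values here are placeholders.
from datasets import CLASSIFICATION_DATASETS

dimensions = 3            # hypothetical value read from the parameter file
dataset_name = "lidc"     # hypothetical; must match a name registered in datasets/*.py

dataset_fn = CLASSIFICATION_DATASETS[f"{dimensions}D"][dataset_name]["FN"]
# dataset_fn is then called to build the classification dataset for training or testing
```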
4 changes: 2 additions & 2 deletions datasets/jsph_mps_datasets.py
@@ -2,8 +2,8 @@
 import numpy as np
 from datasets import CLASSIFICATION_DATASETS
 from scipy.ndimage import generate_binary_structure, binary_erosion
-from medlp.data_io.base_dataset.classification_dataset import BasicClassificationDataset
-from medlp.configures import config as cfg
+from strix.data_io.base_dataset.classification_dataset import BasicClassificationDataset
+from strix.configures import config as cfg
 
 
 from monai_ex.data import CacheDataset, PersistentDataset
2 changes: 1 addition & 1 deletion datasets/jsph_mvi_datasets.py
@@ -2,7 +2,7 @@
 import numpy as np
 
 from datasets import CLASSIFICATION_DATASETS
-from medlp.data_io.base_dataset.classification_dataset import BasicClassificationDataset
+from strix.data_io.base_dataset.classification_dataset import BasicClassificationDataset
 from monai_ex.data import CacheDataset, PersistentDataset
 from monai_ex.transforms import *
 
4 changes: 2 additions & 2 deletions datasets/lidc_idri_datasets.py
@@ -2,8 +2,8 @@
 import numpy as np
 
 from datasets import CLASSIFICATION_DATASETS
-from medlp.data_io.base_dataset.classification_dataset import BasicClassificationDataset
-from medlp.configures import config as cfg
+from strix.data_io.base_dataset.classification_dataset import BasicClassificationDataset
+from strix.configures import config as cfg
 
 from monai_ex.data import CacheDataset, PersistentDataset
 from monai_ex.transforms import *
6 changes: 3 additions & 3 deletions main.py
@@ -3,11 +3,11 @@
 
 from train_v2 import train_core
 from utils import check_dir, get_items_from_file, PathlibEncoder
-from sklearn.model_selection import train_test_split, KFold
+from sklearn.model_selection import train_test_split
 from datasets import CLASSIFICATION_DATASETS
 
 @click.command('train')
-@click.option("param-path", type=click.Path(), default=Path.home()/"Code"/"ExPN-Net"/"param.list")
+@click.option("--param-path", type=click.Path(), default=Path.home()/"Code"/"ExPN-Net"/"param.json")
 def train(param_path):
     confs = get_items_from_file(param_path, format='json')
     random_seed = 42
@@ -17,7 +17,7 @@ def train(param_path):
     else:
         save_dir_name = f"{time.strftime('%m%d_%H%M')}-{confs['net']}-lr_{confs['lr']}-{confs['loss_name']}-{confs['optim']}{confs['postfix']}"
     confs['out_dir'] = check_dir(f"/homes/yliu/Data/pn_cls_exp/{confs['dataset_name']}/{save_dir_name}")
-    with open(confs['out_dir']/'param.list', 'w') as f:
+    with open(confs['out_dir']/'param.json', 'w') as f:
         json.dump(confs, f, indent=2, cls=PathlibEncoder)
 
     dataset_type = CLASSIFICATION_DATASETS[f"{confs['dimensions']}D"][confs['dataset_name']]['FN']
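Since the option above now points at param.json, a hypothetical parameter file is sketched below. It lists only the keys that `train()` reads in these hunks (`net`, `lr`, `loss_name`, `optim`, `postfix`, `dataset_name`, `dimensions`); every value is a placeholder rather than the contents of the repository's actual param.json.

```python
# Hypothetical parameter file, written the same way train() re-saves it.
# Every value below is a placeholder, not taken from the repository's param.json.
import json

confs = {
    "net": "resnet50",        # placeholder network name
    "lr": 0.01,
    "loss_name": "BCE",
    "optim": "sgd",
    "postfix": "",            # appended to the experiment folder name
    "dataset_name": "lidc",   # placeholder; must be registered in CLASSIFICATION_DATASETS
    "dimensions": 3,
}

with open("param.json", "w") as f:
    json.dump(confs, f, indent=2)
```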
2 changes: 1 addition & 1 deletion nets/HESAM.py
@@ -17,7 +17,7 @@
 from monai.networks.nets.basic_unet import Down
 
 # from monai.networks.nets import DynUNet
-from medlp.models.cnn import DynUNet
+from strix.models.cnn import DynUNet
 from monai_ex.networks.layers import Act, Norm, Conv, Pool
 from blocks.basic_block import TwoConv, UpCat, ResidualUnit, SimpleASPP
 from nets.utils import save_activation
4 changes: 2 additions & 2 deletions nets/hesam_aag.py
@@ -9,9 +9,9 @@
 from monai.networks.nets.basic_unet import Down
 
 # from monai.networks.nets import DynUNet
-from medlp.models.cnn import DynUNet
+from strix.models.cnn import DynUNet
 from monai_ex.networks.layers import Act, Norm, Conv, Pool
-from medlp.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
+from strix.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
 from monai.networks.blocks.dynunet_block import get_conv_layer
 from torch.nn.modules.activation import ReLU
 from blocks.basic_block import TwoConv, UpCat, ResidualUnit, SimpleASPP
6 changes: 3 additions & 3 deletions nets/resnet_agg_sam.py
@@ -5,9 +5,9 @@
 import torch.nn as nn
 from typing import Callable
 import numpy as np
-from medlp.models.cnn.nets.resnet import ResNet, BasicBlock, Bottleneck
-from medlp.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
-from medlp.models.cnn.utils import set_trainable
+from strix.models.cnn.nets.resnet import ResNet, BasicBlock, Bottleneck
+from strix.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
+from strix.models.cnn.utils import set_trainable
 from monai_ex.networks.layers import Act, Norm, Conv, Pool
 from blocks.basic_block import UpCat, ResidualUnit
 from nets.HESAM import MultiChannelLinear
6 changes: 3 additions & 3 deletions nets/unet_cls.py
@@ -7,9 +7,9 @@
 import numpy as np
 
 # from monai.networks.nets import DynUNet
-from medlp.models.cnn import DynUNet
-from medlp.models.cnn.blocks.dynunet_block import UnetBasicBlock, UnetUpBlock
-from medlp.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
+from strix.models.cnn import DynUNet
+from strix.models.cnn.blocks.dynunet_block import UnetBasicBlock, UnetUpBlock
+from strix.models.cnn.layers.anatomical_gate import AnatomicalAttentionGate as AAG
 from monai_ex.networks.layers import Act, Norm, Conv, Pool
 from monai.networks.blocks.dynunet_block import get_conv_layer
 from monai.networks.nets.basic_unet import Down
File renamed without changes.
25 changes: 25 additions & 0 deletions readme.md
@@ -0,0 +1,25 @@
+## Official repo of the paper "Towards Reliable and Explainable AI Model for Pulmonary Nodule Diagnosis"
+---
+
+### For the model training:
+`python main.py --param-path /your/parameters_file.json` \
+A simple example of a parameter file is shown in [param.json](param.json).
+
+### For the model testing:
+`python test.py` \
+Note: change the hard-coded variables `model_path` and `test_json` to match your project.
+
+### Requirements
+You need to install the following packages before running this program:
+- pytorch
+- tb-nightly
+- click
+- tqdm
+- numpy
+- scipy
+- scikit-learn
+- nibabel
+- pytorch-ignite
+- [strix](https://github.com/Project-Strix/Strix)
+- [monai_ex](https://github.com/Project-Strix/MONAI_EX)
+- [utils_cw](https://gitlab.com/ChingRyu/py_utils_cw)
6 changes: 3 additions & 3 deletions test.py
@@ -13,8 +13,8 @@
 from utils import DrawROCList, get_network, check_dir, get_items_from_file
 from metrics import AUC_Confidence_Interval, save_roc_curve_fn
 
-model_path = Path("/homes/yliu/Data/pn_cls_exp/lidc-paper_new-sphericity_prob/1215_2358-raw_hesam_agg-slice_1-lr_0.01-plateau-multi-BCE-sgd-sum-parallel-wooffset-2nd/Models/SBAA/BestModel@42with12.817.pt")
-config_path = model_path.parent.parent.parent/'param.list'
+model_path = Path("/homes/yliu/Data/pn_cls_exp/BestModel.pt")
+config_path = model_path.parent.parent.parent/'param.json'
 configures = get_items_from_file(config_path, format='json')
 
 IMAGE_KEY = 'image'
@@ -30,7 +30,7 @@
 device = torch.device("cuda")
 
 
-test_json = Path("/homes/yliu/Data/pn_cls_data/LIDC-IDRI/all_datalist_int_equals3.json")
+test_json = Path("/homes/yliu/Data/LIDC-IDRI/test_datalist.json")
 dataset_name = configures['dataset_name']
 dimensions = configures['dimensions']
 dataset_type = CLASSIFICATION_DATASETS[f'{dimensions}D'][dataset_name]['FN']
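The readme's note about hard-coded variables refers to the two assignments above. A minimal sketch of how test.py resolves its configuration from them, assuming placeholder paths, looks like this; param.json is the file that main.py writes into the experiment output directory, three levels above the checkpoint in the original layout.

```python
# Sketch of the configuration lookup in the test.py hunks above; both paths are
# placeholders that should be edited for your own project, as the readme notes.
from pathlib import Path
from utils import get_items_from_file   # repo-local helper, also used by main.py

model_path = Path("/your/exp_dir/Models/SBAA/BestModel.pt")    # placeholder checkpoint path
config_path = model_path.parent.parent.parent / "param.json"   # the param.json written by main.py
configures = get_items_from_file(config_path, format="json")

test_json = Path("/your/data/test_datalist.json")              # placeholder test datalist
```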