This repository has been archived by the owner on Feb 11, 2023. It is now read-only.

update docs (#20)
* update init for setup
* update docs types
* fix codecov
Borda authored May 16, 2019
1 parent 78e5115 commit 9d8967e
Showing 36 changed files with 322 additions and 304 deletions.
6 changes: 3 additions & 3 deletions .codecov.yml
@@ -6,13 +6,13 @@ codecov:
coverage:
precision: 0 # 2 = xx.xx%, 0 = xx%
round: nearest # how coverage is rounded: down/up/nearest
range: 10...90 # custom range of coverage colors from red -> yellow -> green
range: 30...100 # custom range of coverage colors from red -> yellow -> green
status:
# https://codecov.readme.io/v1.0/docs/commit-status
project:
default:
against: auto
target: 70% # specify the target coverage for each commit status
target: 90% # specify the target coverage for each commit status
threshold: 50% # allow this little decrease on project
# https://github.com/codecov/support/wiki/Filtering-Branches
# branches: master
@@ -39,4 +39,4 @@ comment:
layout: header, diff
require_changes: false
behavior: default # update if exists else create new
branches: *
# branches: *
9 changes: 9 additions & 0 deletions docs/source/conf.py
@@ -73,6 +73,7 @@
# 'sphinx.ext.viewcode',
'sphinx.ext.linkcode',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'recommonmark',
# 'm2r',
'nbsphinx',
@@ -309,3 +310,11 @@ def find_source():
# universal_newlines=True).communicate()[0][:-1]
return "https://github.com/%s/%s/blob/master/%s" \
% (github_user, github_repo, filename)


autodoc_member_order = 'groupwise'
autoclass_content = 'both'
autodoc_default_flags = [
'members', 'undoc-members', 'show-inheritance', 'private-members',
# 'special-members', 'inherited-members'
]
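For context, here is a minimal sketch of what the new autodoc defaults imply for a documented class. The class below is hypothetical and only illustrates the settings; it is not part of the repository.

```python
class CenterDetector:
    """Toy detector used only to illustrate the autodoc settings above."""

    def __init__(self, threshold=0.5):
        """With autoclass_content = 'both', this __init__ docstring is
        appended to the class docstring on the generated API page.

        :param float threshold: decision threshold
        """
        self.threshold = threshold
        self._cache = {}  # rendered because 'private-members' is enabled

    def predict(self, features):
        """Members are grouped by kind thanks to 'groupwise' ordering.

        :param [[float]] features: feature matrix
        :return [int]: predicted labels
        """
        return [int(sum(row) > self.threshold) for row in features]

    def _reset(self):
        # no docstring: still listed because 'undoc-members' is enabled
        self._cache.clear()
```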
2 changes: 1 addition & 1 deletion experiments_ovary_centres/gui_annot_center_correction.py
@@ -78,7 +78,7 @@
def arg_parse_params():
"""
SEE: https://docs.python.org/3/library/argparse.html
:return {str: ...}:
:return dict:
"""
parser = argparse.ArgumentParser()
parser.add_argument('-imgs', '--path_images', type=str, required=False,
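The change above is part of a pass that rewrites ad-hoc return annotations such as ":return {str: ...}:" to the plain ":return dict:" form. A minimal sketch of an argparse helper written in that style follows; it is a simplified, hypothetical version, not the repository's actual parser.

```python
import argparse


def arg_parse_params():
    """Parse command-line arguments into a plain dictionary.

    SEE: https://docs.python.org/3/library/argparse.html
    :return dict:
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-imgs', '--path_images', type=str, required=False,
                        default='', help='path to the input images')
    return vars(parser.parse_args())


if __name__ == '__main__':
    params = arg_parse_params()
    print(params)
```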
20 changes: 10 additions & 10 deletions experiments_ovary_centres/run_center_candidate_training.py
@@ -125,7 +125,7 @@
def arg_parse_params(params):
"""
SEE: https://docs.python.org/3/library/argparse.html
:return {str: ...}:
:return dict:
"""
parser = argparse.ArgumentParser()
parser.add_argument('-list', '--path_list', type=str, required=False,
@@ -352,7 +352,7 @@ def estim_points_compute_features(name, img, segm, params):
:param ndarray img:
:param ndarray segm:
:param {str: any} params:
:return (str, ndarray, [(int, int)], [[float]], [str]):
:return (str, ndarray, [(int, int)], [[float]], list(str)):
"""
# superpixels on image
assert img.shape[:2] == segm.shape[:2], \
@@ -373,7 +373,7 @@ def compute_points_features(segm, points, params):
:param ndarray segm: segmentations
:param [(int, int)] points: positions in image
:param {str: any} params: parameters
:return ([[float]], [str]): [[float] * nb_features] * nb_points, [str] * nb_features
:return ([[float]], list(str)): [[float] * nb_features] * nb_points, list(str) * nb_features
"""
features, feature_names = np.empty((len(points), 0)), list()

@@ -454,7 +454,7 @@ def dataset_load_images_segms_compute_features(params, df_paths, nb_workers=NB_T
:param {str: any} params: parameters
:param DF df_paths: DataFrame
:param int nb_workers: parallel
:return {str: ...}:
:return dict:
"""
dict_imgs, dict_segms, dict_center = dict(), dict(), dict()
logging.info('loading input data (images, segmentation and centers)')
@@ -538,9 +538,9 @@ def compute_statistic_centers(dict_stat, img, segm, center, slic, points, labels
:param ndarray slic:
:param points:
:param labels:
:param {str: ...} params:
:param dict params:
:param str path_out:
:return {}:
:return dict:
"""
labels_gt = label_close_points(center, points, params)

@@ -579,11 +579,11 @@ def detect_center_candidates(name, image, segm, centers_gt, slic, points,
:param slic: np.array
:param [(int, int)] points:
:param features:
:param [str] feature_names:
:param {} params:
:param list(str) feature_names:
:param dict params:
:param str path_out:
:param classif: obj
:return {}:
:return dict:
"""
labels = classif.predict(features)
# proba = classif.predict_proba(features)
@@ -618,7 +618,7 @@ def load_dump_data(path_dump_data):
""" loading saved data prom previous stages
:param path_dump_data:
:return {}:
:return dict:
"""
logging.info('loading dumped data "%s"', path_dump_data)
# with open(os.path.join(path_out, NAME_DUMP_TRAIN_DATA), 'r') as f:
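Several docstrings in this file now describe the returned features as "([[float]], list(str))", i.e. a nb_points x nb_features matrix paired with one name per feature column. A minimal sketch of that contract, using a hypothetical helper rather than the repository's feature extractor:

```python
import numpy as np


def describe_points(points):
    """Compute a toy feature matrix plus matching feature names.

    :param [(int, int)] points: point coordinates
    :return ([[float]], list(str)): features per point, one name per column
    """
    feature_names = ['pos_x', 'pos_y', 'dist_origin']
    features = np.array([[float(x), float(y), float(np.hypot(x, y))]
                         for x, y in points])
    assert features.shape == (len(points), len(feature_names))
    return features, feature_names


features, names = describe_points([(3, 4), (6, 8)])
# features.shape == (2, 3); names has 3 entries, one per column
```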
6 changes: 3 additions & 3 deletions experiments_ovary_centres/run_center_clustering.py
@@ -126,10 +126,10 @@ def export_draw_image_centers_clusters(path_out, name, img, centres, points=None
def cluster_points_draw_export(dict_row, params, path_out=None):
""" cluster points into centers and export visualisations
:param {} dict_row:
:param {str: ...} params:
:param dict dict_row:
:param dict params:
:param str path_out:
:return {}:
:return dict:
"""
assert all(n in dict_row for n in ['path_points', 'path_image', 'path_segm']), \
'missing some required fields: %r' % dict_row
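The docstring above says the function clusters detected points into centres. As a rough illustration, the sketch below groups candidate points with scikit-learn's DBSCAN; this is an assumed stand-in for readability, not necessarily the clustering used in the repository.

```python
import numpy as np
from sklearn.cluster import DBSCAN


def cluster_points_to_centers(points, eps=20., min_samples=3):
    """Group candidate points into centre estimates (illustrative only).

    :param [(int, int)] points: candidate positions
    :param float eps: maximal neighbour distance within one cluster
    :param int min_samples: minimal number of points forming a cluster
    :return [[float]]: one (x, y) centre per cluster
    """
    points = np.asarray(points, dtype=float)
    labels = DBSCAN(eps=eps, min_samples=min_samples).fit_predict(points)
    return [points[labels == lb].mean(axis=0).tolist()
            for lb in sorted(set(labels)) if lb >= 0]  # label -1 is noise
```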
2 changes: 1 addition & 1 deletion experiments_ovary_centres/run_center_evaluation.py
@@ -238,7 +238,7 @@ def evaluate_detection_stage(df_paths, stage, path_info, path_out, nb_workers=1)
def main(params):
""" PIPELINE for new detections
:param {str: ...} params:
:param dict params:
"""
params['path_expt'] = os.path.join(params['path_output'],
run_detect.FOLDER_EXPERIMENT % params['name'])
2 changes: 1 addition & 1 deletion experiments_ovary_centres/run_center_prediction.py
@@ -57,7 +57,7 @@ def load_compute_detect_centers(idx_row, params, classif=None, path_classif='',
generate points, compute features and using given classifier predict labels
:param (int, DF:row) idx_row:
:param {} params:
:param dict params:
:param obj classif:
:param str path_classif:
:param str path_output:
4 changes: 2 additions & 2 deletions experiments_ovary_detect/run_ellipse_annot_match.py
@@ -46,7 +46,7 @@
def arg_parse_params(params):
"""
SEE: https://docs.python.org/3/library/argparse.html
:return {str: ...}:
:return dict:
"""
parser = argparse.ArgumentParser()
parser.add_argument('-imgs', '--path_images', type=str, required=False,
@@ -77,7 +77,7 @@ def select_optimal_ellipse(idx_row, path_dir_csv, overlap_thr=OVERLAP_THRESHOLD)
:param (int, row) idx_row: index and row with user annotation
:param str path_dir_csv: path to list of ellipse parameters
:param float overlap_thr: skip all annot. which Jaccard lower then threshold
:return {}:
:return dict:
"""
_, row = idx_row
dict_row = dict(row)
6 changes: 3 additions & 3 deletions experiments_ovary_detect/run_ovary_egg-segmentation.py
@@ -598,7 +598,7 @@ def simplify_segm_3cls(seg, lut=(0., 0.8, 1.), smooth=True):
def create_dict_segmentation(params, slic, segm, img, centers):
""" create dictionary of segmentation function hash, function and parameters
:param {str: ...} params:
:param dict params:
:param ndarray slic:
:param ndarray segm:
:param [[float]] centers:
@@ -679,7 +679,7 @@ def image_segmentation(idx_row, params, debug_export=DEBUG_EXPORT):
and perform segmentation of various imsegm methods
:param (int, str) idx_row: input image and centres
:param {str: ...} params: segmentation parameters
:param dict params: segmentation parameters
:return str: image name
"""
_, row_path = idx_row
@@ -770,7 +770,7 @@ def export_partial(str_key, obj_content, path_dir, name):
def main(params, debug_export=DEBUG_EXPORT):
""" the main entry point
:param {str: ...} params: segmentation parameters
:param dict params: segmentation parameters
:param bool debug_export: whether export visualisations
"""
logging.getLogger().setLevel(logging.DEBUG)
4 changes: 2 additions & 2 deletions experiments_ovary_detect/run_ovary_segm_evaluation.py
@@ -104,7 +104,7 @@ def arg_parse_params(paths):
def compute_metrics(row):
""" load segmentation and compute similarity metrics
:param {str: ...} row:
:param dict row:
:return {str: float}:
"""
logging.debug('loading annot "%s"\n and segm "%s"', row['path_annot'],
@@ -144,7 +144,7 @@ def compute_metrics(row):
def expert_visual(row, method_name, path_out, max_fig_size=10):
""" export several visualisation segmentation and annotation
:param {str: ...} row:
:param dict row:
:param str method_name:
:param str path_out:
:param int max_fig_size:
2 changes: 1 addition & 1 deletion experiments_segmentation/run_compute_stat_annot_segm.py
@@ -110,7 +110,7 @@ def fill_lut(lut, segm, offset=0):
def export_visual(name, annot, segm, img, path_out, drop_labels, segm_alpha=1.):
""" given visualisation of segmented image and annotation
:param {str: ...} df_row:
:param dict df_row:
:param str path_out: path to the visualisation directory
:param [int] drop_labels: whether skip some labels
"""
6 changes: 3 additions & 3 deletions experiments_segmentation/run_eval_superpixels.py
@@ -57,7 +57,7 @@
def arg_parse_params(params):
"""
SEE: https://docs.python.org/3/library/argparse.html
:return {str: ...}:
:return dict:
"""
parser = argparse.ArgumentParser()
parser.add_argument('-imgs', '--path_images', type=str, required=False,
@@ -97,7 +97,7 @@ def compute_boundary_distance(idx_row, params, path_out=''):
""" compute nearest distance between two segmentation contours
:param (int, str) idx_row:
:param {} params:
:param dict params:
:param str path_out:
:return (str, float):
"""
@@ -125,7 +125,7 @@ def compute_boundary_distance(idx_row, params, path_out=''):
def main(params):
""" compute the distance among segmented superpixels and given annotation
:param {str: ...} params:
:param dict params:
"""
if os.path.isdir(params['path_out']):
logging.info('Missing output dir -> no visual export & results table.')
26 changes: 13 additions & 13 deletions experiments_segmentation/run_segm_slic_classif_graphcut.py
@@ -178,7 +178,7 @@ def load_image_annot_compute_features_labels(idx_row, params,
""" load image and annotation, and compute superpixel features and labels
:param (int, {...}) idx_row: row from table with paths
:param {str: ...} params: segmentation parameters
:param dict params: segmentation parameters
:param bool show_debug_imgs: whether show debug images
:return (...):
"""
@@ -230,9 +230,9 @@ def dataset_load_images_annot_compute_features(params,
1) load image and annotation
2) compute superpixel features and labels
:param {str: ...} params: segmentation parameters
:param dict params: segmentation parameters
:param bool show_debug_imgs: whether show debug images
:return ({str: ndarray} * 6, [str]):
:return ({str: ndarray} * 6, list(str)):
"""
dict_images, dict_annots = dict(), dict()
dict_slics, dict_features, dict_labels, dict_label_hist = \
@@ -266,7 +266,7 @@ def load_dump_data(path_dump_data):
""" load dumped data from previous run of experiment
:param str path_dump_data:
:return ({str: ndarray} * 6, [str]):
:return ({str: ndarray} * 6, list(str)):
"""
logging.info('loading dumped data "%s"', path_dump_data)
# with open(os.path.join(path_out, NAME_DUMP_TRAIN_DATA), 'r') as f:
@@ -294,7 +294,7 @@ def save_dump_data(path_dump_data, imgs, annot, slics, features, labels,
:param {str: ndarray} features: dictionary {name: data} of features
:param {str: ndarray} labels: dictionary {name: data} of lables
:param {str: ndarray} label_hist: dictionary {name: data} of
:param [str] feature_names: list of feature names
:param list(str) feature_names: list of feature names
"""
logging.info('save (dump) data to "%s"', path_dump_data)
np.savez_compressed(path_dump_data, dict_images=imgs, dict_annot=annot,
@@ -316,7 +316,7 @@ def segment_image(imgs_idx_path, params, classif, path_out, path_visu=None,
and trained classifier, and save results
:param (int, str) imgs_idx_path:
:param {str: ...} params: segmentation parameters
:param dict params: segmentation parameters
:param obj classif: trained classifier
:param str path_out: path for output
:param str path_visu: the existing patch means export also visualisation
@@ -379,7 +379,7 @@ def eval_segment_with_annot(params, dict_annot, dict_segm, dict_label_hist=None,
nb_workers=1):
""" evaluate the segmentation results according given annotation
:param {str: ...} params:
:param dict params:
:param {str: ndarray} dict_annot:
:param {str: ndarray} dict_segm:
:param {str: ndarray} dict_label_hist:
@@ -415,7 +415,7 @@ def retrain_lpo_segment_image(list_imgs_idx_path,
""" load the classifier, and dumped data, subtract the image,
retrain the classif without it and do the segmentation
:param [str] list_imgs_idx_path: path to input image
:param list(str) list_imgs_idx_path: path to input image
:param str path_classif: path to saved classifier
:param str path_dump: path to dumped data
:param, str path_out: path to segmentation outputs
@@ -498,15 +498,15 @@ def experiment_lpo(params, df_stat, dict_annot, idx_paths_img, path_classif,
path_dump, nb_holdout, show_debug_imgs=SHOW_DEBUG_IMAGES):
""" experiment Leave-P-samples-Out
:param {str: ...} params:
:param dict params:
:param DF df_stat:
:param {str: ndarray} dict_annot:
:param [str] paths_img:
:param list(str) paths_img:
:param str path_classif:
:param str path_dump:
:param int nb_holdout:
:param bool show_debug_imgs: whether show debug images
:return {}:
:return dict:
"""
logging.info('run prediction on training images as Leave-%i-Out...',
nb_holdout)
@@ -624,8 +624,8 @@ def main_train(params):
3) load or train classifier with hyper-parameters search
4) perform Leave-One-Out and Leave-P-Out experiments on images
:param {str: ...} params:
:return {str: ...}:
:param dict params:
:return dict:
"""
logging.getLogger().setLevel(logging.DEBUG)
logging.info('running TRAINING...')
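The experiment_lpo docstring above refers to a Leave-P-samples-Out protocol driven by nb_holdout. Below is a short sketch of how such folds can be enumerated; it is a hypothetical helper for illustration, not the repository's implementation.

```python
from itertools import combinations


def leave_p_out_splits(image_names, nb_holdout):
    """Enumerate (train, holdout) splits for Leave-P-samples-Out.

    :param list(str) image_names: all training image names
    :param int nb_holdout: how many images to hold out per fold
    :return [(list(str), list(str))]: (train, holdout) pairs
    """
    splits = []
    for holdout in combinations(image_names, nb_holdout):
        train = [name for name in image_names if name not in holdout]
        splits.append((train, list(holdout)))
    return splits


# 4 images with nb_holdout=2 -> C(4, 2) = 6 folds
print(len(leave_p_out_splits(['img1', 'img2', 'img3', 'img4'], 2)))
```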