bug fixes + add anatomist plot class + data provider + average meshes (#13)

* fix setup.py bugs after info.py

* increment version_micro

* add build/ folder to .gitignore

* update install requirements

* add volume-to-point-cloud to CLI

* add Anatomist class

* add region overlaps to data provider

* small doc revision

* add recipes for pcpm meshes

* refactor
mpascucci authored Mar 2, 2022
1 parent 7c04e81 commit 3e1162b
Showing 17 changed files with 483 additions and 89 deletions.
5 changes: 3 additions & 2 deletions .gitignore
@@ -7,7 +7,8 @@ __pycache__/
*.py[cod]
scripts/

*.ipynb
venv

.pytest_cache/
.pytest_cache/
build/
examples_local/
16 changes: 6 additions & 10 deletions dico_toolbox/__init__.py
@@ -1,6 +1,8 @@
import functools
from .info import __version__
from . import _aims_tools
from . import transform
from . import data
from . import data_provider
from . import database
from . import wrappers
from . import convert
@@ -9,16 +11,10 @@
from . import bucket
from . import test_data
from . import mesh
from . import anatomist
from .recipes import *
from .data_provider import *

import logging

log = logging.getLogger(__name__)

try:
    from soma import aims as _aims
    _HAS_AIMS = True
except ImportError:
    _HAS_AIMS = False
    log.warn("Can not import pyAims, are you in a brainvisa environment?")

from .info import __version__
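As a rough sketch of the namespace this reorganised __init__.py exposes (assuming a brainvisa environment so the AIMS-dependent submodules import cleanly; the attribute names come straight from the imports above):

import dico_toolbox as dtb

print(dtb.__version__)         # version string re-exported from dico_toolbox.info
print(dtb.data_provider.data)  # the former dico_toolbox.data module, now under data_provider
print(dtb.anatomist)           # new Anatomist wrapper subpackage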
1 change: 1 addition & 0 deletions dico_toolbox/_dev.py
@@ -2,6 +2,7 @@
import logging

logger = logging.getLogger("dico_toolbox_dev")

def _deprecation_alert_decorator(use_instead):
    def real_decorator(function):
        @functools.wraps(function)
19 changes: 19 additions & 0 deletions dico_toolbox/_tools.py
@@ -0,0 +1,19 @@
import functools
import logging
log = logging.getLogger("Dico_toolbox")

try:
    from soma import aims as _aims
    _HAS_AIMS = True
except ImportError:
    _HAS_AIMS = False
    log.warning("Cannot import pyAims, are you in a brainvisa environment?")


def _with_brainvisa(fun):
    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        if not _HAS_AIMS:
            raise RuntimeError(
                "This function is only available in a brainvisa environment")
        return fun(*args, **kwargs)
    return wrapper
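A minimal sketch of how this decorator is meant to be used; load_volume below is a hypothetical helper, not part of the commit:

from dico_toolbox._tools import _with_brainvisa


@_with_brainvisa
def load_volume(path):
    # safe here: _HAS_AIMS is True whenever the wrapper lets us through
    from soma import aims
    return aims.read(path)

# Outside a brainvisa environment the call raises:
# RuntimeError: This function is only available in a brainvisa environment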
41 changes: 41 additions & 0 deletions dico_toolbox/anatomist/__init__.py
@@ -0,0 +1,41 @@
import os
import logging
import anatomist.api as anatomist


log = logging.getLogger(__name__)

os.environ["QT_API"] = "pyqt5"

message_lines = ["NOTES for jupyter users:",
                 "Remember to set '%gui qt' in a cell of your notebook",
                 "To hide anatomist logs, use the '%%capture' magic command at the beginning of the cell"]


class Anatomist():
    info_displayed = False
    instance = None
    windows = {}

    def __init__(self):
        log.warning("\n".join(message_lines))
        self.instance = anatomist.Anatomist()

    def new_window_3D(self, name="Default"):
        w = self.instance.createWindow("3D", geometry=[1200, 350, 500, 500])
        self.windows[name] = w

    def close(self):
        self.instance.close()

    def add_objects(self, objects, window_name="Default"):
        if isinstance(objects, dict):
            pass
        elif isinstance(objects, (list, tuple)):
            # list to dict
            objects = {str(n): obj for n, obj in enumerate(objects)}

        for name, obj in objects.items():
            m = self.instance.toAObject(obj)
            m.name = name
            m.addInWindows(self.windows[window_name])
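A rough usage sketch of the new wrapper from a Jupyter notebook, assuming a brainvisa environment and that toAObject accepts the AIMS BucketMap produced by the converter (the point coordinates are made up):

# %gui qt   -- as the startup message reminds notebook users
import numpy as np
import dico_toolbox as dtb

points = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]])
bucket_map = dtb.convert.bucket_numpy_to_bucketMap_aims(points)  # an AIMS object to display

ana = dtb.anatomist.Anatomist()
ana.new_window_3D(name="main")
ana.add_objects({"my_points": bucket_map}, window_name="main")
ana.close()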
1 change: 1 addition & 0 deletions dico_toolbox/cli/__init__.py
@@ -0,0 +1 @@
from .volume_to_point_cloud import volume_to_point_cloud
73 changes: 73 additions & 0 deletions dico_toolbox/cli/volume_to_point_cloud.py
@@ -0,0 +1,73 @@
import argparse
import os
from glob import glob
from tqdm import tqdm
import dico_toolbox as dtb
import numpy as np
from multiprocessing import Pool, cpu_count

import logging
log = logging.getLogger(__name__)

try:
    from soma import aims
except ImportError:
    raise ImportError(
        "pyAims could not be imported. Cannot use this tool outside a brainvisa environment.")


def get_fname(path): return os.path.basename(path).split('.')[0]


def volume_to_point_cloud(path):
    """Convert a volume into a point-cloud."""

    fname = get_fname(path)

    try:
        vol = aims.read(path)
        point_cloud = dtb.convert.volume_to_bucket_numpy(vol)
        error_msg = None
    except Exception as e:
        point_cloud = None
        error_msg = str(e)

    return {"name": fname, "point-cloud": point_cloud, "error_msg": error_msg}


def main(*args, **kwargs):
    parser = argparse.ArgumentParser(
        description="Convert specified AIMS volume files into point clouds and store them in one compressed numpy file.")
    parser.add_argument(
        "input_path", help="The path to the volume to convert (wildcards are admitted, e.g. *.nii)", nargs='*', type=str)
    parser.add_argument("-o", "--output_path",
                        help="The path of the output file containing the bucket", type=str)
    args = parser.parse_args()

    out_path = args.output_path
    if out_path is None:
        out_path = "point_clouds.npz"
    base_out_dir = os.path.dirname(out_path)

    # check output path validity (an empty dirname means the current directory)
    if base_out_dir and not os.path.isdir(base_out_dir):
        log.critical(f'"{base_out_dir}" is not a valid directory')
        return 1

    fun = volume_to_point_cloud

    with Pool(max(1, cpu_count() - 3)) as pool:
        out = list(tqdm(
            pool.imap(fun, args.input_path), total=len(args.input_path)))

    pcs = {d['name']: d['point-cloud'] for d in out if d['error_msg'] is None}
    errors = [d['error_msg'] for d in out if d['error_msg'] is not None]

    print("Creating output file...", end='')

    np.savez_compressed(out_path, **pcs)
    print("Done.")

    if len(errors) > 0:
        error_str = [f"There were {len(errors)} ERRORS:", *errors]
        log.error("\n".join(error_str))
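As a sketch of the output this tool produces (the file name is the default set in main(); each stored entry is the argwhere result of one volume, typically an (N, 3) array of voxel coordinates):

import numpy as np

point_clouds = np.load("point_clouds.npz")
for name in point_clouds.files:
    print(name, point_clouds[name].shape)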
46 changes: 5 additions & 41 deletions dico_toolbox/convert.py
@@ -78,7 +78,7 @@ def bucket_aims_to_ndarray(aims_bucket, voxel_size=(1, 1, 1)):
    return v


def bucket_numpy_to_bucketMap_aims(ndarray, voxel_size=(1,1,1)):
def bucket_numpy_to_bucketMap_aims(ndarray, voxel_size=(1, 1, 1)):
    """Transform a (N,3) ndarray into an aims BucketMap_VOID.
    The coordinates in the input array are cast to int.
    """
@@ -107,7 +107,6 @@ def bucket_numpy_to_bucket_aims(ndarray):
    pass



def volume_to_ndarray(volume):
    """Transform aims volume in numpy array.
@@ -237,12 +236,13 @@ def volume_to_bucket_numpy(volume):
    return np.argwhere(volume_to_ndarray(volume))


def volume_to_bucketMap_aims(volume, voxel_size=(1,1,1)):
def volume_to_bucketMap_aims(volume, voxel_size=(1, 1, 1)):
    """Convert a volume (aims or numpy) into an AIMS bucket"""
    points_cloud = np.argwhere(volume_to_ndarray(volume))
    #add 4th dimension to the voxel size with a default size of 1
    # add 4th dimension to the voxel size with a default size of 1
    return bucket_numpy_to_bucketMap_aims(points_cloud, voxel_size=voxel_size)


@_deprecation_alert_decorator(volume_to_bucketMap_aims)
def volume_to_bucket_aims(volume):
    pass
@@ -380,7 +380,7 @@ def bucket_to_mesh(bucket, gblur_sigma=0, threshold=1,
        raise ValueError("Input is a BucketMap, not a bucket.")

    if any([x-int(x) != 0 for x in bucket[:].ravel()]):
        log.warn(
        log.debug(
            "This bucket's coordinates are not integers. Did you apply any transformation to it?")

    volume, offset = bucket_numpy_to_volume_numpy(bucket)
@@ -393,42 +393,6 @@
    return mesh


def buket_to_aligned_mesh(*args, **kwargs):
    raise SyntaxError(
        "This function is deprecated due to misspelling of 'bucket', please use bucket_to_aligned_mesh")


def bucket_to_aligned_mesh(raw_bucket, talairach_dxyz, talairach_rot, talairach_tr, align_rot, align_tr, flip=False, **kwargs):
    """Generate the mesh of the given bucket.
    The mesh is transformed according to the given rotations and translations.
    The Talairach parameters are the scaling vector, the rotation matrix and the translation vector of the Talairach transform.
    The align parameters are the rotation matrix and translation vector of the alignment with the central subject.
    The kwargs are directly passed to cld.aims_tools.bucket_to_mesh().
    """

    # Generate mesh
    mesh = bucket_to_mesh(raw_bucket, **kwargs)

    dxyz = talairach_dxyz.copy()

    # Rescale mesh
    _mesh.rescale_mesh(mesh, dxyz)

    # apply Talairach transform
    _mesh.transform_mesh(mesh, talairach_rot, talairach_tr)

    if flip:
        _mesh.flip_mesh(mesh)

    # apply alignment transform
    _mesh.transform_mesh(mesh, align_rot, align_tr)

    return mesh


# ALIASES FOR DEPRECATED FUNCTIONS
@_deprecation_alert_decorator(use_instead=ndarray_to_volume_aims)
def ndarray_to_aims_volume(*args, **kwargs): pass
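A short sketch of the conversion path touched by this file, assuming a brainvisa environment and assuming these converters accept plain numpy arrays (as the volume_to_bucketMap_aims docstring suggests); the input array is made up for illustration:

import numpy as np
import dico_toolbox as dtb

vol = np.zeros((10, 10, 10), dtype=np.int16)
vol[3:6, 3:6, 3:6] = 1                                        # a small cubic "structure"

points = dtb.convert.volume_to_bucket_numpy(vol)              # voxel coordinates of non-zero entries
bucket_map = dtb.convert.volume_to_bucketMap_aims(vol, voxel_size=(1, 1, 1))
mesh = dtb.convert.bucket_to_mesh(points, gblur_sigma=1, threshold=0.5)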
2 changes: 2 additions & 0 deletions dico_toolbox/data_provider/__init__.py
@@ -0,0 +1,2 @@
from . import nomenclature
from . import data
23 changes: 1 addition & 22 deletions dico_toolbox/data.py → dico_toolbox/data_provider/data.py
@@ -1,33 +1,12 @@
from glob import glob
import os
import re
import logging
from difflib import get_close_matches
import functools

log = logging.getLogger(__name__)

try:
    from soma import aims as _aims
    HAS_AIMS = True
except ImportError:
    HAS_AIMS = False


def _with_brainvisa(fun):
    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        if not HAS_AIMS:
            raise RuntimeError(
                "This function is only available in a brainvisa environment")
        return fun(*args, **kwargs)
    return wrapper

from .._tools import _with_brainvisa

class paths:
    dico = "/neurospin/dico"


class pclean:
    """file provider for the pclean folder"""

29 changes: 29 additions & 0 deletions dico_toolbox/data_provider/json_data.py
@@ -0,0 +1,29 @@

import json
import re


class JsonData:
    """Basic handling of json data"""

    def __init__(self, json_string):
        self.data = json.loads(json_string)

    def _filter_list(self, regex, iterable):
        """Filter list items by regular expression"""
        return list(filter(regex.search, iterable))

    def _filter_dict(self, regex, dictionary):
        """Filter dictionary items by regular expression"""
        return {k: v for k, v in dictionary.items() if regex.search(k)}

    def _filter(self, regular_expression, object):
        """Filter objects by regular expression"""
        result = None
        regex = re.compile(regular_expression)
        if isinstance(object, dict):
            result = self._filter_dict(regex, object)
        elif isinstance(object, list):
            result = self._filter_list(regex, object)

        return result
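A small illustration of the regex filtering (the JSON string and the key names are made up):

from dico_toolbox.data_provider.json_data import JsonData

jd = JsonData('{"S.C._left": 1, "S.C._right": 2, "F.C.M._left": 3}')

jd._filter(r"_left$", jd.data)        # -> {'S.C._left': 1, 'F.C.M._left': 3}
jd._filter(r"right", list(jd.data))   # -> ['S.C._right']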