Skip to content

Commit

Permalink
test update with simulated data
Browse files Browse the repository at this point in the history
  • Loading branch information
m-miedema committed Jun 6, 2019
1 parent 8357cd9 commit 3c6d009
Show file tree
Hide file tree
Showing 11 changed files with 196 additions and 40 deletions.
42 changes: 30 additions & 12 deletions cfg.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
file_io:
dirs:
studyDir: /biotic/home/marym/data/thesis
subjectsDir: /biotic/home/marym/data/thesis/subjects
subjectsDir: /media/NAS/mmiedema/reliability-mapping/subjects
simDir: /biotic/home/marym/data/thesis/sim_data
relDir: /biotic/home/marym/data/thesis/reliability-maps
relDir: /media/NAS/mmiedema/reliability-mapping/
file_ext:
src_fif: -vol-5-src.fif
mri_fif: COR-coreg.fif
Expand All @@ -14,32 +14,50 @@ file_io:
sim_epoch_fif: SEF_simDipole-epo.fif
reliability_mapping:
data_division:
num_replications: 5
chronological: False
num_replications: 4
chronological: True
nn:
neighbourhood: 2
radius: 0.005 # only used if neighbourhood = False
likelihood:
ML:
num_thresholds: 10
init_beta: 0.3
max_iterations: 400
conv_tolerance: 0.001
max_iterations: 50
conv_tolerance: 0.0001
files:
MASK: None
map_times:
- 20
map_thresholds:
- 0.8
- 0.5
data_processing:
baseline:
baseStart: -0.02
baseEnd: 0.
LCMV:
bfBaselineMin: -0.5 # this is different
# spacing: 8 # mm
bfBaselineMin: -0.5
bfBaselineMax: 0.0
bfActiveMin: 0.0
bfActiveMax: 0.1
regularization: 0.01
studySettings:
subjects:
- sub01
- sub02
# - sub01
# - sub02
- sub03
sim_modes:
- one_fixed
# - one_jitter
# - one_fixed
# - one_jitter
- real_SEF
simTestData:
base_subject:
- sub03
base_mode:
- real_SEF
sim_subject:
- simsub
sim_mode:
- single-source

27 changes: 17 additions & 10 deletions cfg.yml~
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ file_io:
dirs:
studyDir: /biotic/home/marym/data/thesis
subjectsDir: /biotic/home/marym/data/thesis/subjects
simDir: /biotic/home/marym/data/thesis/sim_data/sim_data
simDir: /biotic/home/marym/data/thesis/sim_data
relDir: /biotic/home/marym/data/thesis/reliability-maps
file_ext:
src_fif: -vol-5-src.fif
Expand All @@ -19,17 +19,24 @@ reliability_mapping:
nn:
neighbourhood: 2
radius: 0.005 # only used if neighbourhood = False
likelihood:
init_beta: 0.3
max_iterations: 400
conv_tolerance: 0.001
ML:
num_thresholds: 10
init_beta: 2.3454
max_iterations: 5
conv_tolerance: 0.005
files:
MASK: None
map_times:
- 20
map_thresholds:
- 0.8
- 0.5
data_processing:
baseline:
baseStart: -0.02
baseEnd: 0.
LCMV:
LCMV:
# spacing: 8 # mm
bfBaselineMin: -0.5 # this is different
bfBaselineMax: 0.0
bfActiveMin: 0.0
Expand All @@ -38,8 +45,8 @@ data_processing:
studySettings:
subjects:
- sub01
- sub02
- sub03
# - sub02
# - sub03
sim_modes:
- one_fixed
# - one_jitter
# - one_fixed
- one_jitter
5 changes: 2 additions & 3 deletions map_reliability.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,8 @@
import logging.config
#import warnings
#import sys
import map_reliability_support.set_up_replications as setrep
#import map_reliability.map_reliability_support.likelihood_calc as likelihood
#import map_reliability.map_reliability_support.create_reliability_map as rmap
import map_reliability_support.set_up_replications as setrep
import map_reliability_support.likelihood_calc as likelihood


logger = logging.getLogger(__name__)
Expand Down
Binary file modified map_reliability.pyc
Binary file not shown.
17 changes: 8 additions & 9 deletions map_reliability.py~
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,17 @@ import argparse
import yaml
#import mne
import logging.config
import warnings
#import warnings
#import sys
import map_reliability_support.set_up_replications as setrep
#import map_reliability.map_reliability_support.likelihood_calc as likelihood
#import map_reliability.map_reliability_support.create_reliability_map as rmap
import map_reliability_support.set_up_replications as setrep
import map_reliability_support.likelihood_calc as likelihood


logger = logging.getLogger(__name__)
#mne.set_log_level("WARNING")

warnings.simplefilter("ignore", category=DeprecationWarning)
warnings.simplefilter("ignore", category=RuntimeWarning)
#warnings.simplefilter("ignore", category=DeprecationWarning)
#warnings.simplefilter("ignore", category=RuntimeWarning)

def main(configFile):
"""Top-level run script for mapping the reliability of MEG data."""
Expand Down Expand Up @@ -52,9 +51,9 @@ def main(configFile):
pass

start_time = time.time()
logger.info("Setting up replications & calculating nearest neighbours.")
setrep.set_up(cfg)
logger.info("Set-up completed.")
#logger.info("Setting up replications & calculating nearest neighbours.")
#setrep.set_up(cfg)
#logger.info("Set-up completed.")
logger.info("Starting maximum likelihood calculations.")
#likelihood.calc(cfg)
#likelihood.initialize()
Expand Down
Empty file.
27 changes: 25 additions & 2 deletions map_reliability_support/set_up_replications.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ def set_up(cfg):
bfBaselineMax = cfg["data_processing"]["LCMV"]["bfBaselineMax"]
bfActiveMin = cfg["data_processing"]["LCMV"]["bfActiveMin"]
bfActiveMax = cfg["data_processing"]["LCMV"]["bfActiveMax"]
regularization = cfg["data_processing"]["LCMV"]["regularization"]
regularization = cfg["data_processing"]["LCMV"]["regularization"]
#spacing = cfg["data_processing"]["LCMV"]["spacing"]

for subject_id in cfg["studySettings"]["subjects"]:
# subject-specific data:
Expand Down Expand Up @@ -122,4 +123,26 @@ def set_up(cfg):
mne.save_stc_as_volume(niiFile, stc, forward['src'], dest='surf',
mri_resolution=False)

split_num = split_num + 1
split_num = split_num + 1

# also create an ERB map for the entire dataset
evoked = epochs.average()
evoked.apply_baseline((baseStart, baseEnd))
# calculate covariance
noise_cov = mne.compute_covariance(epochs,
tmin=bfBaselineMin, tmax=bfBaselineMax, n_jobs = 4)
data_cov = mne.compute_covariance(epochs,
tmin=bfActiveMin, tmax=bfActiveMax, n_jobs = 4)
# run LCMV beamformer
filters = mne.beamformer.make_lcmv(epochs.info, forward, data_cov,
reg=regularization, noise_cov=noise_cov, pick_ori='max-power')
stc = mne.beamformer.apply_lcmv(evoked, filters)
# crop beamformer result to times of interest
stc.crop(bfActiveMin, bfActiveMax)
# take absolute value of beamformer (to eliminate anti-phase issue)
np.abs(stc.data, out=stc.data)

# save ERB map
niiFile = os.path.join(repDir, "".join(['full', cfg["file_io"]["file_ext"]["nifti_file"]]))
mne.save_stc_as_volume(niiFile, stc, forward['src'], dest='surf',
mri_resolution=False)
Binary file removed map_reliability_support/set_up_replications.pyc
Binary file not shown.
29 changes: 26 additions & 3 deletions map_reliability_support/set_up_replications.py~
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ def set_up(cfg):
bfBaselineMax = cfg["data_processing"]["LCMV"]["bfBaselineMax"]
bfActiveMin = cfg["data_processing"]["LCMV"]["bfActiveMin"]
bfActiveMax = cfg["data_processing"]["LCMV"]["bfActiveMax"]
regularization = cfg["data_processing"]["LCMV"]["regularization"]
regularization = cfg["data_processing"]["LCMV"]["regularization"]
spacing = cfg["data_processing"]["LCMV"]["spacing"]

for subject_id in cfg["studySettings"]["subjects"]:
# subject-specific data:
Expand All @@ -62,7 +63,7 @@ def set_up(cfg):
# split epochs file into a number of epochs files (ie. replications) & save an ERB map for each

epochFile = os.path.join(simDir, 'sim_data', subject_id, sim_mode,
"".join([subject_id, cfg["file_io"]["file_ext"]["sim_epoch_fif"]]))
cfg["file_io"]["file_ext"]["sim_epoch_fif"])

repDir = os.path.join(relDir, subject_id, sim_mode)
if not os.path.exists(repDir):
Expand Down Expand Up @@ -122,4 +123,26 @@ def set_up(cfg):
mne.save_stc_as_volume(niiFile, stc, forward['src'], dest='surf',
mri_resolution=False)

split_num = split_num + 1
split_num = split_num + 1

# also create an ERB map for the entire dataset
evoked = epochs.average()
evoked.apply_baseline((baseStart, baseEnd))
# calculate covariance
noise_cov = mne.compute_covariance(epochs,
tmin=bfBaselineMin, tmax=bfBaselineMax, n_jobs = 4)
data_cov = mne.compute_covariance(epochs,
tmin=bfActiveMin, tmax=bfActiveMax, n_jobs = 4)
# run LCMV beamformer
filters = mne.beamformer.make_lcmv(epochs.info, forward, data_cov,
reg=regularization, noise_cov=noise_cov, pick_ori='max-power')
stc = mne.beamformer.apply_lcmv(evoked, filters)
# crop beamformer result to times of interest
stc.crop(bfActiveMin, bfActiveMax)
# take absolute value of beamformer (to eliminate anti-phase issue)
np.abs(stc.data, out=stc.data)

# save ERB map
niiFile = os.path.join(repDir, "".join(['full', cfg["file_io"]["file_ext"]["nifti_file"]]))
mne.save_stc_as_volume(niiFile, stc, forward['src'], dest='surf',
mri_resolution=False)
89 changes: 88 additions & 1 deletion map_reliability_support/support_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,12 @@
"""

from scipy import spatial
from random import randint
import os
import mne
import pandas as pd
import numpy as np
import nibabel as nib
import logging.config
import warnings

Expand Down Expand Up @@ -70,4 +72,89 @@ def get_nn_src(srcFile, nnFile, neighbourhood, radius):
# to do: it would be nice to rank neighbours in DF & get rid of neighbourhood/spacing options
nn_dfs.append(pt_df)
nn_df = pd.concat(nn_dfs)
nn_df.to_csv(nnFile)
nn_df.to_csv(nnFile)

def sim_replications(cfg):
    """Generate simulated ERB replication maps from a base subject's real maps.

    For each replication NIfTI volume of the base subject/mode (plus the
    'full' undivided dataset map), a simulated volume is built per time step:
    half-amplitude Gaussian noise matched to the real frame's mean/std, plus
    one randomly jittered Gaussian point source, masked to in-head voxels.
    Results are saved under the simulated subject/mode directory.

    Parameters
    ----------
    cfg : dict
        Parsed configuration; reads ``simTestData`` (base/sim subject and
        mode), ``file_io`` (directories and NIfTI extension) and
        ``reliability_mapping.data_division.num_replications``.
    """
    base_subject = cfg["simTestData"]["base_subject"]
    base_mode = cfg["simTestData"]["base_mode"]
    sim_subject = cfg["simTestData"]["sim_subject"]
    sim_mode = cfg["simTestData"]["sim_mode"]
    relDir = cfg["file_io"]["dirs"]["relDir"]
    replicDir = os.path.join(relDir, base_subject[0], base_mode[0])
    simDir = os.path.join(relDir, sim_subject[0], sim_mode[0])
    num_replic = cfg["reliability_mapping"]["data_division"]["num_replications"]
    nifti_ext = cfg["file_io"]["file_ext"]["nifti_file"]

    # simulate from each of the original replications
    for split_num in range(1, num_replic + 1):
        fname = "".join(['replication_', str(split_num), '_', nifti_ext])
        _simulate_from_base(os.path.join(replicDir, fname),
                            os.path.join(simDir, fname))

    # also simulate from the "full" (undivided) ERB map
    full_name = "".join(['full', nifti_ext])
    _simulate_from_base(os.path.join(replicDir, full_name),
                        os.path.join(simDir, full_name))


def _simulate_from_base(baseFile, simFile):
    """Create one simulated 4D NIfTI volume from a real base volume.

    Each time step gets abs-valued Gaussian noise (matched to the base
    frame's mean/std, at half amplitude) plus a single Gaussian point
    source whose position is randomly jittered around a fixed seed
    location; voxels that are zero in the base frame (outside the head)
    are forced to zero.  The result keeps the base volume's NIfTI header.
    """
    base_obj = nib.load(baseFile)
    base_data = base_obj.get_fdata()
    sim_frames = []
    for ts in range(base_data.shape[3]):
        base_dist = base_data[:, :, :, ts]
        # noise matched to the real distribution's statistics
        noise_dist = np.abs(np.random.normal(loc=np.mean(base_dist),
                                             scale=np.std(base_dist),
                                             size=base_data.shape[0:-1]))
        # add one point source, scaled to the real peak, at a jittered position
        amp = np.amax(base_dist)
        pos = [8 + randint(-2, 3), 16 + randint(-4, 4), 22 + randint(-5, 5)]
        source_dist = Gaussian_amp(0.5 * noise_dist, amp, pos, 5)
        # zero out voxels outside the head (zero in the base frame)
        active = np.where(base_dist.flatten() > 0., 1., 0.)
        sim_frames.append(np.multiply(source_dist,
                                      np.reshape(active, source_dist.shape)))
    sim_data = np.stack(sim_frames, axis=3)
    sim_nii = nib.Nifti1Image(sim_data, None, header=base_obj.header.copy())
    nib.save(sim_nii, simFile)




def Gaussian_amp(array, amp, pos, stdev):
    """Add a 3D Gaussian point source to *array* in place and return it.

    The source has peak amplitude ``amp`` at voxel ``pos`` and an isotropic
    spatial standard deviation ``stdev`` (in voxels).

    Parameters
    ----------
    array : numpy.ndarray
        3D volume to add the source to; modified in place.
    amp : float
        Peak amplitude of the Gaussian.
    pos : sequence of 3 numbers
        (x, y, z) voxel position of the source centre.
    stdev : float
        Standard deviation of the Gaussian, in voxels.

    Returns
    -------
    numpy.ndarray
        The same ``array`` object, with the source added.
    """
    # Open grids broadcast against each other to the full 3D shape, replacing
    # the original O(nx*ny*nz) pure-Python triple loop with one vectorized pass.
    x, y, z = np.ogrid[0:array.shape[0], 0:array.shape[1], 0:array.shape[2]]
    source = amp * np.exp(0.5 * (-(x - pos[0]) ** 2. - (y - pos[1]) ** 2.
                                 - (z - pos[2]) ** 2.) / stdev ** 2.)
    # Element-wise assignment back into the original buffer keeps the
    # in-place mutation semantics of the per-voxel version.
    array[...] = array + source
    return array



Binary file modified map_reliability_support/support_functions.pyc
Binary file not shown.

0 comments on commit 3c6d009

Please sign in to comment.