From 215c98a74571c7049a604ad20109dcb0caf1ca7a Mon Sep 17 00:00:00 2001 From: sangfrois Date: Thu, 20 Jul 2023 18:12:46 -0400 Subject: [PATCH 01/14] adding neurokit2 --- peakdet/operations.py | 58 +++++++++++++++++++++++++++++++++++++++++-- peakdet/physio.py | 28 ++++++++++++++++++++- 2 files changed, 83 insertions(+), 3 deletions(-) diff --git a/peakdet/operations.py b/peakdet/operations.py index a7bc7c5..7075372 100644 --- a/peakdet/operations.py +++ b/peakdet/operations.py @@ -34,13 +34,12 @@ def filter_physio(data, cutoffs, method, *, order=3): Filtered input `data` """ - _valid_methods = ['lowpass', 'highpass', 'bandpass', 'bandstop'] + _valid_methods = ['lowpass', 'highpass', 'bandpass', 'bandstop' ] data = utils.check_physio(data, ensure_fs=True) if method not in _valid_methods: raise ValueError('Provided method {} is not permitted; must be in {}.' .format(method, _valid_methods)) - cutoffs = np.array(cutoffs) if method in ['lowpass', 'highpass'] and cutoffs.size != 1: raise ValueError('Cutoffs must be length 1 when using {} filter' @@ -60,6 +59,60 @@ def filter_physio(data, cutoffs, method, *, order=3): return filtered +@utils.make_operation() +def neurokit_processing(data, modality, method): + """ + Applies an `order`-order digital `method` Butterworth filter to `data` + + Parameters + ---------- + data : Physio_like + Input physiological data to be filtered + modality : str + Modality of the data. + One of 'ECG', 'PPG', 'RSP', 'EDA', + method : str + The name of the processing procedure to apply to `data` + + Returns + ------- + clean : :class:`peakdet.Physio` + Filtered input `data` + """ + try: + import neurokit2 as nk + except ImportError: + raise ImportError('neurokit2 is required to use this function') + modality = modality.upper() + if modality not in ['ECG', 'PPG', 'RSP', 'EDA']: + raise ValueError('Provided modality {} is not permitted; must be in {}.' 
+ .format(modality, ['ECG', 'PPG', 'RSP', 'EDA'])) + + data = utils.check_physio(data, ensure_fs=True) + if modality == 'ECG': + # NOTE: change for bottenhorn filtering + data = filter_physio(data, cutoffs=40, method='lowpass') + signal, info = nk.ecg_peaks(data, sampling_rate=data.fs, method=method) + info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] + elif modality == 'PPG': + signal, info = nk.ppg_process(data, sampling_rate=data.fs, method=method) + elif modality == 'RSP': + signal, info = nk.rsp_process(data, sampling_rate=data.fs, method=method) + elif modality == 'EDA': + signal, info = nk.eda_process(data, sampling_rate=data.fs, method=method) + info[f'{modality}_Peaks'] = info['SCR_Peaks'] + data._metadata['peaks'] = np.array(info[f'{modality}_Peaks']) + try: + info[f'{modality}_Troughs'] + data._metadata['troughs'] = np.array(info[f'{modality}_Troughs']) + data._metadata['troughs'] = utils.check_troughs(data, data.peaks, data.troughs) + except KeyError: + pass + data._features['info'] = info + data._features['signal'] = signal + clean = utils.new_physio_like(data, signal[f'{modality}_Clean'].values) + # ADD IN OTHER INFO as features + return clean @utils.make_operation() def interpolate_physio(data, target_fs, *, kind='cubic'): @@ -251,6 +304,7 @@ def edit_physio(data): # no point in manual edits if peaks/troughs aren't defined if not (len(data.peaks) and len(data.troughs)): + print(data.peaks, data.troughs) return # perform manual editing diff --git a/peakdet/physio.py b/peakdet/physio.py index c7652ec..da0c20f 100644 --- a/peakdet/physio.py +++ b/peakdet/physio.py @@ -4,6 +4,7 @@ """ import numpy as np +import pandas as pd @@ -37,11 +38,14 @@ class Physio(): Indices of peaks in `data` troughs : :obj:`numpy.ndarray` Indices of troughs in `data` + features : dict + Dictionary of features extracted from `data` + when running :func:`operations.neurokit_processing` suppdata : :obj:`numpy.ndarray` Secondary physiological waveform """ - def __init__(self, data, fs=None, history=None, metadata=None, suppdata=None): + def __init__(self, data, fs=None, history=None, metadata=None, features=None, suppdata=None): self._data = np.asarray(data).squeeze() if self.data.ndim > 1: raise ValueError('Provided data dimensionality {} > 1.' @@ -72,6 +76,23 @@ def __init__(self, data, fs=None, history=None, metadata=None, suppdata=None): self._metadata = dict(peaks=np.empty(0, dtype=int), troughs=np.empty(0, dtype=int), reject=np.empty(0, dtype=int)) + if features is not None: + if not isinstance(self._features, dict): + raise TypeError('Provided features {} must be dict-like.' 
+ .format(self._features)) + for k in ['info', 'signal']: + # info must be a dict and signal must be a dataframe + if k == 'info': + if not isinstance(self._features.get(k), dict): + raise TypeError('Provided features must be dict-like' + 'with dict entries.') + elif k == 'signal': + if not isinstance(self._features.get(k), pd.DataFrame): + raise TypeError('Provided features must be dict-like' + 'with dataframe entries.') + self._features = dict(**features) + else: + self._features = dict(info=dict(), signal=pd.DataFrame()) self._suppdata = None if suppdata is None else np.asarray(suppdata).squeeze() def __array__(self): @@ -127,6 +148,11 @@ def _masked(self): def suppdata(self): """ Physiological data """ return self._suppdata + + @property + def features(self): + """ Features extracted from physiological data """ + return self._features def phys2neurokit(self, copy_data, copy_peaks, copy_troughs, module, neurokit_path=None): """ Physio to neurokit dataframe From 055c0fd32b56d7951ff08b45cab70ce47bfa707d Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Fri, 21 Jul 2023 09:33:56 -0400 Subject: [PATCH 02/14] update setup.cfg file --- setup.cfg | 68 +++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 46 insertions(+), 22 deletions(-) diff --git a/setup.cfg b/setup.cfg index 09c05dd..c969287 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,7 +20,6 @@ long_description_content_type = text/markdown; charset=UTF-8 platforms = OS Independent provides = peakdet - [options] python_requires = >=3.6.1 install_requires = @@ -39,37 +38,39 @@ test_suite = pytest zip_safe = False packages = find: include_package_data = True - [options.extras_require] duecredit = duecredit nk = + neurokit pandas doc = - %(nk)s - pandas sphinx >=2.0 sphinx-argparse sphinx_rtd_theme + myst-parser style = - flake8 >=3.7 - flake8-docstrings >=1.5 + flake8>=4.0 + black<23.0.0 + isort<6.0.0 + pydocstyle + codespell test = - pytest >=5.3 - pytest-cov - %(style)s %(nk)s -all = %(doc)s - %(duecredit)s %(style)s + pytest >=5.3 + pytest-cov + coverage +devtools = + pre-commit +dev = + %(devtools)s + %(doc)s %(test)s - - [options.package_data] -* = +* = peakdet/tests/data/* - [flake8] exclude= *build/ @@ -79,20 +80,43 @@ ignore = E126, E203, E402, W503 max-line-length = 99 per-file-ignores = */__init__.py:F401 - -[coverage:run] -omit = peakdet/cli/* - +[isort] +profile = black +skip_gitignore = true +extend_skip = + .autorc + .coverage* + .readthedocs.yml + .zenodo.json + codecov.yml + setup.py + versioneer.py + peakdet/_version.py +skip_glob = + docs/* +[pydocstyle] +convention = numpy +match = + peakdet/*.py +match_dir = peakdet/[^tests]* +[codespell] +skip = versioneer.py,.git,build,./docs/_build +ignore-words-list = nd,commun +write-changes = +count = +quiet-level = 3 [tool:pytest] doctest_optionflags = NORMALIZE_WHITESPACE xfail_strict = true addopts = -rx -norecursedirs = peakdet/cli - +[coverage:run] +branch = True +omit = + peakdet/cli/* [versioneer] VCS = git style = pep440 versionfile_source = peakdet/_version.py versionfile_build = peakdet/_version.py tag_prefix = -parentdir_prefix = +parentdir_prefix = \ No newline at end of file From b7778e2269a92026bee0aaac79a29c686824ba21 Mon Sep 17 00:00:00 2001 From: sangfrois Date: Fri, 21 Jul 2023 10:47:34 -0400 Subject: [PATCH 03/14] first draft adding ecg gradient removal --- peakdet/operations.py | 344 +++++++++++++++++++++++++++++++++++------- 1 file changed, 289 insertions(+), 55 deletions(-) diff --git 
a/peakdet/operations.py b/peakdet/operations.py index 7075372..8d8f521 100644 --- a/peakdet/operations.py +++ b/peakdet/operations.py @@ -59,61 +59,6 @@ def filter_physio(data, cutoffs, method, *, order=3): return filtered -@utils.make_operation() -def neurokit_processing(data, modality, method): - """ - Applies an `order`-order digital `method` Butterworth filter to `data` - - Parameters - ---------- - data : Physio_like - Input physiological data to be filtered - modality : str - Modality of the data. - One of 'ECG', 'PPG', 'RSP', 'EDA', - method : str - The name of the processing procedure to apply to `data` - - Returns - ------- - clean : :class:`peakdet.Physio` - Filtered input `data` - """ - try: - import neurokit2 as nk - except ImportError: - raise ImportError('neurokit2 is required to use this function') - modality = modality.upper() - if modality not in ['ECG', 'PPG', 'RSP', 'EDA']: - raise ValueError('Provided modality {} is not permitted; must be in {}.' - .format(modality, ['ECG', 'PPG', 'RSP', 'EDA'])) - - data = utils.check_physio(data, ensure_fs=True) - if modality == 'ECG': - # NOTE: change for bottenhorn filtering - data = filter_physio(data, cutoffs=40, method='lowpass') - signal, info = nk.ecg_peaks(data, sampling_rate=data.fs, method=method) - info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] - elif modality == 'PPG': - signal, info = nk.ppg_process(data, sampling_rate=data.fs, method=method) - elif modality == 'RSP': - signal, info = nk.rsp_process(data, sampling_rate=data.fs, method=method) - elif modality == 'EDA': - signal, info = nk.eda_process(data, sampling_rate=data.fs, method=method) - info[f'{modality}_Peaks'] = info['SCR_Peaks'] - data._metadata['peaks'] = np.array(info[f'{modality}_Peaks']) - try: - info[f'{modality}_Troughs'] - data._metadata['troughs'] = np.array(info[f'{modality}_Troughs']) - data._metadata['troughs'] = utils.check_troughs(data, data.peaks, data.troughs) - except KeyError: - pass - data._features['info'] = info - data._features['signal'] = signal - clean = utils.new_physio_like(data, signal[f'{modality}_Clean'].values) - # ADD IN OTHER INFO as features - return clean - @utils.make_operation() def interpolate_physio(data, target_fs, *, kind='cubic'): """ @@ -351,3 +296,292 @@ def plot_physio(data, *, ax=None): time[data.troughs], data[data.troughs], '.g') return ax + +@utils.make_operation() +def neurokit_processing(data, modality, method, **kwargs): + """ + Applies an `order`-order digital `method` Butterworth filter to `data` + + Parameters + ---------- + data : Physio_like + Input physiological data to be filtered + modality : str + Modality of the data. + One of 'ECG', 'PPG', 'RSP', 'EDA', + method : str + The processing pipeline to apply, choose from neurokit2 lists + + Returns + ------- + clean : :class:`peakdet.Physio` + Filtered input `data` + """ + try: + import neurokit2 as nk + except ImportError: + raise ImportError('neurokit2 is required to use this function') + modality = modality.upper() + if modality not in ['ECG', 'PPG', 'RSP', 'EDA']: + raise ValueError('Provided modality {} is not permitted; must be in {}.' 
+ .format(modality, ['ECG', 'PPG', 'RSP', 'EDA'])) + + data = utils.check_physio(data, ensure_fs=True) + if modality == 'ECG': + data = fmri_ecg_clean(data, method=method_cleaning, **kwargs) + signal, info = nk.ecg_peaks(data, method=method_peaks) + info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] + elif modality == 'PPG': + signal, info = nk.ppg_process(data, sampling_rate=data.fs, method=method) + elif modality == 'RSP': + signal, info = nk.rsp_process(data, sampling_rate=data.fs, method=method) + elif modality == 'EDA': + signal, info = nk.eda_process(data, sampling_rate=data.fs, method=method) + info[f'{modality}_Peaks'] = info['SCR_Peaks'] + data._metadata['peaks'] = np.array(info[f'{modality}_Peaks']) + try: + info[f'{modality}_Troughs'] + data._metadata['troughs'] = np.array(info[f'{modality}_Troughs']) + data._metadata['troughs'] = utils.check_troughs(data, data.peaks, data.troughs) + except KeyError: + pass + data._features['info'] = info + data._features['signal'] = signal + clean = utils.new_physio_like(data, signal[f'{modality}_Clean'].values) + # ADD IN OTHER INFO as features + return clean + +# ====================================================================== +# Electrocardiogram (ECG) +# ======================================================================= + +@utils.make_operation() +def fmri_ecg_clean(data, method="biopac", me=False, **kwargs): + """ + Clean an ECG signal. + + Prepare a raw ECG signal for R-peak detection with the specified method. + + Parameters + ---------- + data : Physio_like + The raw ECG signal to clean. + sampling_rate : float + The sampling frequency of `ecg_signal` (in Hz, i.e., samples/second). + Default to 10000. + method : str + The processing pipeline to apply between 'biopac' and 'bottenhorn'. + Default to 'biopac'. + me : bool + Specify if the MRI sequence used was the multi-echo (True) + or the single-echo (False). + Default to False. + downsampling : int + The desired sampling frequency (Hz). If None, the signal is not resample. + Default to None. + + Returns + ------- + ecg_clean : :class:`peakdet.Physio` + The cleaned ECG signal in object. + """ + # check if the TR is specified + if "tr" not in kwargs.keys(): + raise ValueError( + "The TR must be specified when using the multi-echo sequence." + ) + tr = kwargs["tr"] + # check if the MB factor is specified + if "mb" not in kwargs.keys(): + raise ValueError( + "The multiband factor must be specified when using the multi-echo sequence." + ) + mb = kwargs["mb"] + # check if the number of slices is specified + if "slices" not in kwargs.keys(): + raise ValueError( + "The number of slices must be specified when using the multi-echo sequence." + ) + slices = kwargs["slices"] + + if method in ["biopac"]: + data = _ecg_clean_biopac(data, tr=tr, slices=slices) + elif method in ["bottenhorn", "bottenhorn2022"]: + # Apply comb band pass filter with Bottenhorn correction + print("... Applying the corrected comb band pass filter.") + ecg_clean = _ecg_clean_bottenhorn(data, tr=tr, mb=mb, slices=slices) + else: + raise ValueError( + "The specified method is not supported. Please choose between 'biopac' and 'bottenhorn'." + ) + + return ecg_clean + + +# ============================================================================= +# ECG internal : biopac recommendations +# ============================================================================= +def _ecg_clean_biopac(data, tr=1.49, slices=60, Q=100): + """ + Single-band sequence gradient noise reduction. 
+ + This function is a reverse-engineered appropriation of BIOPAC's application note 242. + It only applies to signals polluted by single-band (f)MRI sequence. + + Parameters + ---------- + data : Physio_like + The ECG signal in object. + sampling_rate: float + The sampling frequency of `ecg_signal` (in Hz, i.e., samples/second). + Default to 10000. + tr : int + The time Repetition of the MRI scanner. + Default to 1.49. + slices : + The number of volumes acquired in the tr period. + Default to 60. + Q : int + The filter quality factor. + Default to 100. + + Returns + ------- + ecg_clean : array + The cleaned ECG signal. + + References + ---------- + Biopac Systems, Inc. Application Notes: application note 242 + ECG Signal Processing During fMRI + https://www.biopac.com/wp-content/uploads/app242x.pdf + """ + # Setting scanner sequence parameters + nyquist = np.float64(sampling_rate / 2) + notches = {"slices": slices / tr, "tr": 1 / tr} + # remove baseline wandering + data = filter_physio( + data, + cutoffs=2, + method="highpass", + ) + # Filtering at specific harmonics + data = _comb_band_stop(notches, nyquist, data, Q) + # bandpass filtering + data_clean = filter_physio( + data, + cutoffs=[2,20], + method="bandpass", + order=5, + ) + + return ecg_clean + + +def _ecg_clean_bottenhorn( + ecg_signal, sampling_rate=10000.0, tr=1.49, mb=4, slices=60, Q=100 +): + """ + Multiband sequence gradient noise reduction. + + Parameters + ---------- + ecg_signal : array + The ECG channel. + sampling_rate : float + The sampling frequency of `ecg_signal` (in Hz, i.e., samples/second). + Default to 10000. + tr : float + The time Repetition of the MRI scanner. + Default to 1.49. + mb : 4 + The multiband acceleration factor. + Default to 4. + slices : int + The number of volumes acquired in the tr period. + Default to 60. + Q : int + The filter quality factor. + Default to 100. + + Returns + ------- + ecg_clean : array + The cleaned ECG signal. + + References + ---------- + Bottenhorn, K. L., Salo, T., Riedel, M. C., Sutherland, M. T., Robinson, J. L., + Musser, E. D., & Laird, A. R. (2021). Denoising physiological data collected + during multi-band, multi-echo EPI sequences. bioRxiv, 2021-04. + https://doi.org/10.1101/2021.04.01.437293 + + See also + -------- + https://neuropsychology.github.io/NeuroKit/_modules/neurokit2/signal/signal_filter.html#signal_filter + """ + # Setting scanner sequence parameters + nyquist = np.float64(sampling_rate / 2) + notches = {"slices": slices / mb / tr, "tr": 1 / tr} + + # Remove low frequency artefacts: respiration & baseline wander using high pass butterworth filter (order=2) + print("... Applying high pass filter.") + ecg_clean = filter_physio( + data, cutoffs=2, method="highpass" + ) + # Filtering at fundamental and specific harmonics per Biopac application note #265 + print("... Applying notch filter.") + ecg_clean = _comb_band_stop(notches, nyquist, ecg_clean, Q) + # Low pass filtering at 40Hz per Biopac application note #242 + print("... Applying low pass filtering.") + ecg_clean = filter_physio(data, cutoffs=40, method="lowpass") + # bandpass filtering + ecg_clean = filter_physio( + data, + cutoffs=[2, 20], + method="bandpass", + order=5, + ) + + return ecg_clean + +@utils.make_operation() +def _comb_band_stop(notches, nyquist, filtered, Q): + """ + A serie of notch filters aligned with the scanner gradient's harmonics. + + Parameters + ---------- + notches : dict + Frequencies to use in the IIR notch filter. + nyquist : float + The Nyquist frequency. 
+ filtered : Physio_like + Data to be filtered. + Q : int + The filter quality factor. + + Returns + ------- + filtered : Physio_like + The filtered signal. + + References + ---------- + Biopac Systems, Inc. Application Notes: application note 242 + ECG Signal Processing During fMRI + https://www.biopac.com/wp-content/uploads/app242x.pdf + + See also + -------- + https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.filtfilt.html + https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.iirnotch.html + """ + # band stoping each frequency specified with notches dict + for notch in notches: + for i in np.arange(1, int(nyquist / notches[notch])): + f0 = notches[notch] * i + w0 = f0 / nyquist + b, a = signal.iirnotch(w0, Q) + filtered = utils.new_physio_like(data, signal.filtfilt(b, a, filtered)) + return filtered \ No newline at end of file From f56ec611bbfc77f82419c08102227b819460a57c Mon Sep 17 00:00:00 2001 From: sangfrois Date: Fri, 21 Jul 2023 12:30:15 -0400 Subject: [PATCH 04/14] linting --- peakdet/operations.py | 60 ++++++++++++++++--------------------------- peakdet/physio.py | 12 ++++++--- 2 files changed, 30 insertions(+), 42 deletions(-) diff --git a/peakdet/operations.py b/peakdet/operations.py index 8d8f521..05f682f 100644 --- a/peakdet/operations.py +++ b/peakdet/operations.py @@ -34,7 +34,7 @@ def filter_physio(data, cutoffs, method, *, order=3): Filtered input `data` """ - _valid_methods = ['lowpass', 'highpass', 'bandpass', 'bandstop' ] + _valid_methods = ['lowpass', 'highpass', 'bandpass', 'bandstop'] data = utils.check_physio(data, ensure_fs=True) if method not in _valid_methods: @@ -59,6 +59,7 @@ def filter_physio(data, cutoffs, method, *, order=3): return filtered + @utils.make_operation() def interpolate_physio(data, target_fs, *, kind='cubic'): """ @@ -207,29 +208,6 @@ def add_peaks(data, add): return data -@utils.make_operation() -def add_peaks(data, add): - """ - Add `newpeak` to add them in `data` - - Parameters - ---------- - data : Physio_like - add : int - - Returns - ------- - data : Physio_like - """ - - data = utils.check_physio(data, ensure_fs=False, copy=True) - idx = np.searchsorted(data._metadata['peaks'], add) - data._metadata['peaks'] = np.insert(data._metadata['peaks'], idx, add) - data._metadata['troughs'] = utils.check_troughs(data, data.peaks) - - return data - - def edit_physio(data): """ Opens interactive plot with `data` to permit manual editing of time series @@ -297,6 +275,7 @@ def plot_physio(data, *, ax=None): return ax + @utils.make_operation() def neurokit_processing(data, modality, method, **kwargs): """ @@ -307,7 +286,7 @@ def neurokit_processing(data, modality, method, **kwargs): data : Physio_like Input physiological data to be filtered modality : str - Modality of the data. + Modality of the data. 
One of 'ECG', 'PPG', 'RSP', 'EDA', method : str The processing pipeline to apply, choose from neurokit2 lists @@ -328,8 +307,8 @@ def neurokit_processing(data, modality, method, **kwargs): data = utils.check_physio(data, ensure_fs=True) if modality == 'ECG': - data = fmri_ecg_clean(data, method=method_cleaning, **kwargs) - signal, info = nk.ecg_peaks(data, method=method_peaks) + data = fmri_ecg_clean(data, method=method, **kwargs) + signal, info = nk.ecg_peaks(data, method=method) info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] elif modality == 'PPG': signal, info = nk.ppg_process(data, sampling_rate=data.fs, method=method) @@ -355,6 +334,7 @@ def neurokit_processing(data, modality, method, **kwargs): # Electrocardiogram (ECG) # ======================================================================= + @utils.make_operation() def fmri_ecg_clean(data, method="biopac", me=False, **kwargs): """ @@ -412,7 +392,8 @@ def fmri_ecg_clean(data, method="biopac", me=False, **kwargs): ecg_clean = _ecg_clean_bottenhorn(data, tr=tr, mb=mb, slices=slices) else: raise ValueError( - "The specified method is not supported. Please choose between 'biopac' and 'bottenhorn'." + "The specified method is not supported. " + "Please choose between 'biopac' and 'bottenhorn'." ) return ecg_clean @@ -457,7 +438,7 @@ def _ecg_clean_biopac(data, tr=1.49, slices=60, Q=100): https://www.biopac.com/wp-content/uploads/app242x.pdf """ # Setting scanner sequence parameters - nyquist = np.float64(sampling_rate / 2) + nyquist = np.float64(data.fs / 2) notches = {"slices": slices / tr, "tr": 1 / tr} # remove baseline wandering data = filter_physio( @@ -470,16 +451,16 @@ def _ecg_clean_biopac(data, tr=1.49, slices=60, Q=100): # bandpass filtering data_clean = filter_physio( data, - cutoffs=[2,20], + cutoffs=[2, 20], method="bandpass", order=5, ) - return ecg_clean + return data_clean def _ecg_clean_bottenhorn( - ecg_signal, sampling_rate=10000.0, tr=1.49, mb=4, slices=60, Q=100 + data, tr=1.49, mb=4, slices=60, Q=100 ): """ Multiband sequence gradient noise reduction. @@ -521,10 +502,11 @@ def _ecg_clean_bottenhorn( https://neuropsychology.github.io/NeuroKit/_modules/neurokit2/signal/signal_filter.html#signal_filter """ # Setting scanner sequence parameters - nyquist = np.float64(sampling_rate / 2) + nyquist = np.float64(data.fs / 2) notches = {"slices": slices / mb / tr, "tr": 1 / tr} - # Remove low frequency artefacts: respiration & baseline wander using high pass butterworth filter (order=2) + # Remove low frequency artefacts: respiration & baseline wander using + # high pass butterworth filter (order=2) print("... Applying high pass filter.") ecg_clean = filter_physio( data, cutoffs=2, method="highpass" @@ -545,8 +527,9 @@ def _ecg_clean_bottenhorn( return ecg_clean + @utils.make_operation() -def _comb_band_stop(notches, nyquist, filtered, Q): +def _comb_band_stop(notches, nyquist, data, Q): """ A serie of notch filters aligned with the scanner gradient's harmonics. @@ -556,7 +539,7 @@ def _comb_band_stop(notches, nyquist, filtered, Q): Frequencies to use in the IIR notch filter. nyquist : float The Nyquist frequency. - filtered : Physio_like + data : Physio_like Data to be filtered. Q : int The filter quality factor. 
@@ -583,5 +566,6 @@ def _comb_band_stop(notches, nyquist, filtered, Q): f0 = notches[notch] * i w0 = f0 / nyquist b, a = signal.iirnotch(w0, Q) - filtered = utils.new_physio_like(data, signal.filtfilt(b, a, filtered)) - return filtered \ No newline at end of file + filtered = utils.new_physio_like(data, signal.filtfilt(b, a, data)) + + return filtered diff --git a/peakdet/physio.py b/peakdet/physio.py index da0c20f..be45f8f 100644 --- a/peakdet/physio.py +++ b/peakdet/physio.py @@ -2,12 +2,10 @@ """ Helper class for holding physiological data and associated metadata inforamtion """ - import numpy as np import pandas as pd - class Physio(): """ Class to hold physiological data and relevant information @@ -22,6 +20,9 @@ class Physio(): Functions performed on `data`. Default: None metadata : dict, optional Metadata associated with `data`. Default: None + features : dict, optional + Features extracted from `data`. Default: None + Returned by :func:`operations.neurokit_processing` suppdata : array_like, optional Support data array. Default: None @@ -148,7 +149,7 @@ def _masked(self): def suppdata(self): """ Physiological data """ return self._suppdata - + @property def features(self): """ Features extracted from physiological data """ @@ -175,7 +176,10 @@ def phys2neurokit(self, copy_data, copy_peaks, copy_troughs, module, neurokit_pa if neurokit_path is not None: df = pd.read_csv(neurokit_path, sep='\t') else: - df = pd.DataFrame(0, index=np.arange(len(self.data)), columns=['%s_Raw' % module, '%s_Peaks' % module, '%s_Troughs' % module]) + df = pd.DataFrame(0, index=np.arange(len(self.data)), + columns=['%s_Raw' % module, + '%s_Peaks' % module, + '%s_Troughs' % module]) if copy_data: df.loc[:, df.columns.str.endswith('Raw')] = self.data From 382fb687e869ecedd9798741c9a500b9e03d9b64 Mon Sep 17 00:00:00 2001 From: sangfrois Date: Fri, 21 Jul 2023 14:13:20 -0400 Subject: [PATCH 05/14] [ENH] added neurokit2 features as attributes --- peakdet/physio.py | 6 +++--- peakdet/utils.py | 7 +++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/peakdet/physio.py b/peakdet/physio.py index be45f8f..aa79e13 100644 --- a/peakdet/physio.py +++ b/peakdet/physio.py @@ -78,17 +78,17 @@ def __init__(self, data, fs=None, history=None, metadata=None, features=None, su troughs=np.empty(0, dtype=int), reject=np.empty(0, dtype=int)) if features is not None: - if not isinstance(self._features, dict): + if not isinstance(features, dict): raise TypeError('Provided features {} must be dict-like.' 
.format(self._features)) for k in ['info', 'signal']: # info must be a dict and signal must be a dataframe if k == 'info': - if not isinstance(self._features.get(k), dict): + if not isinstance(features.get(k), dict): raise TypeError('Provided features must be dict-like' 'with dict entries.') elif k == 'signal': - if not isinstance(self._features.get(k), pd.DataFrame): + if not isinstance(features.get(k), pd.DataFrame): raise TypeError('Provided features must be dict-like' 'with dataframe entries.') self._features = dict(**features) diff --git a/peakdet/utils.py b/peakdet/utils.py index 7858f0b..02f4371 100644 --- a/peakdet/utils.py +++ b/peakdet/utils.py @@ -147,7 +147,7 @@ def check_physio(data, ensure_fs=True, copy=False): def new_physio_like(ref_physio, data, *, fs=None, suppdata=None, dtype=None, - copy_history=True, copy_metadata=True, copy_suppdata=True): + copy_history=True, copy_metadata=True, copy_features=True, copy_suppdata=True): """ Makes `data` into physio object like `ref_data` @@ -168,6 +168,8 @@ def new_physio_like(ref_physio, data, *, fs=None, suppdata=None, dtype=None, Copy history from `ref_physio` to new physio object. Default: True copy_metadata : bool, optional Copy metadata from `ref_physio` to new physio object. Default: True + copy_features : bool, optional + Copy features from `ref_physio` to new physio object. Default: True copy_suppdata : bool, optional Copy suppdata from `ref_physio` to new physio object. Default: True @@ -183,6 +185,7 @@ def new_physio_like(ref_physio, data, *, fs=None, suppdata=None, dtype=None, dtype = ref_physio.data.dtype history = list(ref_physio.history) if copy_history else [] metadata = dict(**ref_physio._metadata) if copy_metadata else None + features = dict(**ref_physio._features) if copy_features else None if suppdata is None: suppdata = ref_physio._suppdata if copy_suppdata else None @@ -190,7 +193,7 @@ def new_physio_like(ref_physio, data, *, fs=None, suppdata=None, dtype=None, # make new class out = ref_physio.__class__(np.array(data, dtype=dtype), fs=fs, history=history, metadata=metadata, - suppdata=suppdata) + suppdata=suppdata, features=features) return out From 78c52247bd11fcdce2222ae560ea8b28f1d1f2dd Mon Sep 17 00:00:00 2001 From: sangfrois Date: Fri, 21 Jul 2023 14:22:23 -0400 Subject: [PATCH 06/14] fixing ECG workflow --- peakdet/operations.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/peakdet/operations.py b/peakdet/operations.py index 05f682f..ed867dc 100644 --- a/peakdet/operations.py +++ b/peakdet/operations.py @@ -300,15 +300,21 @@ def neurokit_processing(data, modality, method, **kwargs): import neurokit2 as nk except ImportError: raise ImportError('neurokit2 is required to use this function') + + # sanity check modality = modality.upper() if modality not in ['ECG', 'PPG', 'RSP', 'EDA']: raise ValueError('Provided modality {} is not permitted; must be in {}.' 
.format(modality, ['ECG', 'PPG', 'RSP', 'EDA'])) - + data = utils.check_physio(data, ensure_fs=True) + + # apply neurokit2 processing to a specific modality if modality == 'ECG': - data = fmri_ecg_clean(data, method=method, **kwargs) - signal, info = nk.ecg_peaks(data, method=method) + method_cleaning = kwargs.get('method_cleaning') + method_peaks = kwargs.get('method_peaks') + data = fmri_ecg_clean(data, method=method_cleaning, **kwargs) + signal, info = nk.ecg_peaks(data, data.fs, method=method_peaks, correct_artifacts=True) info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] elif modality == 'PPG': signal, info = nk.ppg_process(data, sampling_rate=data.fs, method=method) From d56f62eb4263e2339507ba9df894377cdc1d28c3 Mon Sep 17 00:00:00 2001 From: sangfrois Date: Fri, 21 Jul 2023 14:53:14 -0400 Subject: [PATCH 07/14] YAY! ecg cleaning works --- peakdet/operations.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/peakdet/operations.py b/peakdet/operations.py index ed867dc..7568326 100644 --- a/peakdet/operations.py +++ b/peakdet/operations.py @@ -277,7 +277,7 @@ def plot_physio(data, *, ax=None): @utils.make_operation() -def neurokit_processing(data, modality, method, **kwargs): +def neurokit_processing(data, modality, method=None, **kwargs): """ Applies an `order`-order digital `method` Butterworth filter to `data` @@ -306,13 +306,14 @@ def neurokit_processing(data, modality, method, **kwargs): if modality not in ['ECG', 'PPG', 'RSP', 'EDA']: raise ValueError('Provided modality {} is not permitted; must be in {}.' .format(modality, ['ECG', 'PPG', 'RSP', 'EDA'])) - + data = utils.check_physio(data, ensure_fs=True) # apply neurokit2 processing to a specific modality if modality == 'ECG': method_cleaning = kwargs.get('method_cleaning') method_peaks = kwargs.get('method_peaks') + print(method_peaks) data = fmri_ecg_clean(data, method=method_cleaning, **kwargs) signal, info = nk.ecg_peaks(data, data.fs, method=method_peaks, correct_artifacts=True) info[f'{modality}_Peaks'] = info['ECG_R_Peaks'] @@ -332,8 +333,11 @@ def neurokit_processing(data, modality, method, **kwargs): pass data._features['info'] = info data._features['signal'] = signal - clean = utils.new_physio_like(data, signal[f'{modality}_Clean'].values) - # ADD IN OTHER INFO as features + try: + clean = utils.new_physio_like(data, signal[f'{modality}_Clean'].values) + except KeyError: + # data already has the clean signal + clean = data return clean # ====================================================================== From c43ffc54287235c6dd0a939e92957b080afac8f6 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Sat, 22 Jul 2023 07:47:45 -0400 Subject: [PATCH 08/14] update workflow --- peakdet/blocks.py | 39 ++++++++ peakdet/workflow.py | 234 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 273 insertions(+) create mode 100644 peakdet/blocks.py create mode 100644 peakdet/workflow.py diff --git a/peakdet/blocks.py b/peakdet/blocks.py new file mode 100644 index 0000000..61ad5b8 --- /dev/null +++ b/peakdet/blocks.py @@ -0,0 +1,39 @@ +from peakdet import load_physio, save_physio +from peakdet.operations import edit_physio, interpolate_physio, filter_physio, peakfind_physio + +FUNCTION_MAPPINGS = { + "interpolate_physio": interpolate_physio, + "filter_physio": filter_physio, + "peakfind_physio": peakfind_physio +} + +def process_signals(data, steps): + """ + Parameters + ---------- + data : :class:`peakdet.Physio` + steps : list + + 
Return + ------ + data : :class:`peakdet.Physio` (w/ features from peakfind_physio) + """ + for step in steps: + func = list(step.keys())[0] + data = FUNCTION_MAPPINGS[func](data, **step[func]) + return data + + +def manual_peaks(data, fname): + """ + data : str or array_like or Physio_like + Input physiological data. If array_like, should be one-dimensional + fname : str + Path to output file; .phys will be appended if necessary + """ + # Load signals + phys = load_physio(data, allow_pickle=True) + # Edit peaks + phys = edit_physio(data) + # Save edits + save_physio(fname, phys) \ No newline at end of file diff --git a/peakdet/workflow.py b/peakdet/workflow.py new file mode 100644 index 0000000..db070b8 --- /dev/null +++ b/peakdet/workflow.py @@ -0,0 +1,234 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +`peakdet` main workflow and related functions. +""" +import os +import re +import sys +import json +import logging +import datetime +import pandas as pd +from pathlib import Path +from peakdet import _version, Physio, save_physio +from peakdet.cli.run import _get_parser +from peakdet.blocks import process_signals, manual_peaks +from peakdet.operations import filter_physio, peakfind_physio, interpolate_physio + +TRIGGER_NAMES = ["trig", "trigger", "ttl"] + +FUNCTION_MAPPINGS = { + "interpolate_physio": interpolate_physio, + "filter_physio": filter_physio, + "peakfind_physio": peakfind_physio +} + +LGR = logging.getLogger(__name__) +LGR.setLevel(logging.INFO) + +def find_chtrig(data): + joint_match = "§".join(TRIGGER_NAMES) + indexes = [] + for n, case in enumerate(data.columns): + name = re.split(r"(\W+|\d|_|\s)", case) + name = list(filter(None, name)) + if re.search("|".join(name), joint_match, re.IGNORECASE): + indexes = indexes + [n] + + if indexes: + if len(indexes) > 1: + raise Exception( + "More than one possible trigger channel was automatically found. " + "Please run phys2bids specifying the -chtrig argument." + ) + else: + return int(indexes[0]) + else: + return None + + + +def save_bash_call(fname, outdir, outname): + + if outdir is None: + if outname is None: + if len(fname) == 1: + outdir = os.path.dirname(fname[0]) + else: + outdir = os.path.commonpath(fname) + else: + outdir = os.path.split(outname)[0] + + if outdir == "" or outdir == "/": + outdir = "." + outdir = os.path.join(outdir, "peakdet") + + outdir = os.path.abspath(outdir) + log_path = os.path.join(outdir, "logs") + os.makedirs(log_path, exist_ok=True) + arg_str = " ".join(sys.argv[1:]) + call_str = f"peakdet {arg_str}" + outdir = os.path.abspath(outdir) + log_path = os.path.join(outdir, "logs") + os.makedirs(log_path, exist_ok=True) + isotime = datetime.datetime.now().strftime("%Y-%m-%dT%H%M%S") + f = open(os.path.join(log_path, f"peakdet_call_{isotime}.sh"), "a") + f.write(f"#!bin/bash \n{call_str}") + f.close() + + +def peakdet(fname, + config_file, + outname=None, + outdir=None, + phys_idx=None, + chtrig=None, + manual_detector=True, + lgr_degree="info"): + """ + fname : str + Path to the physiological data file ('tsv.gz') + config_file : str + Path to config file specifying the processing steps for each modality. For config + file examples, check `peakdet/configs/` + outname : str, os.PathLike, or None, optional + Path to the output file - or just its full name. If an extension is *not* declared, + the program will automatically append .phys to the specified name. It is *not* necessary + to declare both this and `outdir` - the full path can be specified here. 
+ outdir : str, os.PathLike, or None, optional + Path to the output folder. If it doesn't exist, it will be created. + If both `outdir` and `outname` are declared, `outdir` overrides the path + specified in `outname` (but not the filename!) + phys_idx : int or list of int + Index(es) of the column(s) in the fname containing the timeserie to clean and process. + If None, the workflow will go through all the columns of the fname file in `source`. + If you run the workflow on Phys2Bids outputs, please keep in mind the channel 0 is the time. + chtrig : + The column number of the trigger channel. Default is None. If chtrig is left as None peakdet will + perform an automatic trigger channel search by channel names. + manual_detector : bool + Flag for manual peaks check. Default to True. + lgr_degree : 'debug', 'info', or 'quiet', optional + The degree of verbosity of the logger. Default is 'info'. + """ + # Prepare folders + if outdir is None: + if outname is None: + outdir = os.path.commonpath(fname) + else: + outdir = os.path.split(outname)[0] + + if outdir == "" or outdir == "/": + outdir = "." + outdir = os.path.join(outdir, "peakdet") + + outdir = os.path.abspath(outdir) + log_path = os.path.join(outdir, "logs") + os.makedirs(log_path, exist_ok=True) + + # Create logfile name + basename = "peakdet_" + extension = "tsv" + isotime = datetime.datetime.now().strftime("%Y-%m-%dT%H%M%S") + logname = os.path.join(log_path, f"{basename}{isotime}.{extension}") + + # Set logging format + log_formatter = logging.Formatter( + "%(asctime)s\t%(name)-12s\t%(levelname)-8s\t%(message)s", + datefmt="%Y-%m-%dT%H:%M:%S", + ) + + # Set up logging file and open it for writing + log_handler = logging.FileHandler(logname) + log_handler.setFormatter(log_formatter) + sh = logging.StreamHandler() + + if lgr_degree == "quiet": + logging.basicConfig( + level=logging.WARNING, + handlers=[log_handler, sh], + format="%(levelname)-10s %(message)s", + ) + elif lgr_degree == "debug": + logging.basicConfig( + level=logging.DEBUG, + handlers=[log_handler, sh], + format="%(levelname)-10s %(message)s", + ) + else: + logging.basicConfig( + level=logging.INFO, + handlers=[log_handler, sh], + format="%(levelname)-10s %(message)s", + ) + + version_number = _version.get_versions()["version"] + LGR.info(f"Currently running peakdet version {version_number}") + + # Checks arguments (e.g. 
config file structure) + # TODO + + # Load config file + with open(config_file) as c: + config = json.load(c) + c.close() + + # Load data + fname = Path(fname) + with open(str(fname).rstrip(''.join(fname.suffixes)) + ".json") as p: + info = json.load(p) + p.close() + data = pd.read_csv(os.path.join(fname), names=info['Columns'], sep="\t") + + # If phys_idx not None, keep only the specified columns + if phys_idx is not None: + data = pd.DataFrame(data.iloc[:,phys_idx]) + else : + # Remove time and trigger columns + if chtrig !=0 : + chtime = data.columns.get_loc("time") + data.drop(data.columns[chtime, chtrig], axis=1, inplace=True) + else: + #find automatically trigger channel index + LGR.info("Running automatic trigger detection.") + chtrig = find_chtrig(data) + if chtrig is not None: + data.drop(data.columns[chtime, chtrig], axis=1, inplace=True) + else: + LGR.warning("No trigger channel specified nor found, the workflow will be run on all columns of the dataframe") + data.drop(data.columns[chtime], axis=1, inplace=True) + + # Looping through timeseries to clean and process each modality one at a time + for idx, col in enumerate(data): + # Get the sampling frequency + if isinstance(info['SamplingFrequency'], (list)): + fs = info['SamplingFrequency'][idx] + else: + fs = info['SamplingFrequency'] + # Create Physio obj + physio_obj = Physio(data[col], fs=fs) + # Call process_signals specifying the processing steps for the given modality + physio_obj = process_signals(physio_obj, config[col]) + + # Call manual_peaks function if manual_detector flag set to True + if manual_detector: + # Perform manual peaks detection and saving output + manual_peaks(physio_obj, os.path.join(outdir, fname+f"_{col}.phys")) + else: + # Save outputs + save_physio(os.path.join(outdir, fname+f"_{col}.phys", physio_obj)) + + LGR.info(f"peakdet finished! 
Check results in {outdir}.") + + +def _main(argv=None): + options = _get_parser().parse_args(argv) + + save_bash_call(options.fname, options.outdir, options.outname) + + peakdet(**vars(options)) + + +if __name__ == "__main__": + _main(sys.argv[1:]) \ No newline at end of file From f71331101f4bdd74fc8412da47792340b37956d4 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Thu, 27 Jul 2023 14:30:56 -0400 Subject: [PATCH 09/14] automatic trigger channel detection --- peakdet/utils.py | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/peakdet/utils.py b/peakdet/utils.py index 7858f0b..60da341 100644 --- a/peakdet/utils.py +++ b/peakdet/utils.py @@ -7,9 +7,19 @@ from functools import wraps import inspect import numpy as np +import re from peakdet import physio +from peakdet.operations import filter_physio, peakfind_physio, interpolate_physio +TRIGGER_NAMES = ["trig", "trigger", "ttl"] + +FUNCTION_MAPPINGS = { + "interpolate_physio": interpolate_physio, + "filter_physio": filter_physio, + "peakfind_physio": peakfind_physio +} + def make_operation(*, exclude=None): """ Wrapper to make functions into Physio operations @@ -225,3 +235,38 @@ def check_troughs(data, peaks, troughs=None): all_troughs[f] = idx return all_troughs + + +def find_chtrig(data): + """ + Parameters + ---------- + data : DataFrame + DataFrame containing the timeseries + Returns + ------- + Trigger channel index + + References + ---------- + Daniel Alcalá, Apoorva Ayyagari, Katie Bottenhorn, Molly Bright, César Caballero-Gaudes, Inés Chavarría, Vicente Ferrer, Soichi Hayashi, Vittorio Iacovella, François Lespinasse, Ross Markello, Stefano Moia, Robert Oostenveld, Taylor Salo, Rachael Stickland, Eneko Uruñuela, Merel van der Thiel, & Kristina Zvolanek. (2023). physiopy/phys2bids: BIDS formatting of physiological recordings (2.10.0). Zenodo. https://doi.org/10.5281/zenodo.7896344 + """ + joint_match = "§".join(TRIGGER_NAMES) + indexes = [] + for n, case in enumerate(data.columns): + name = re.split(r"(\W+|\d|_|\s)", case) + name = list(filter(None, name)) + if re.search("|".join(name), joint_match, re.IGNORECASE): + indexes = indexes + [n] + + if indexes: + if len(indexes) > 1: + raise Exception( + "More than one possible trigger channel was automatically found. " + "Please run phys2bids specifying the -chtrig argument." 
+ ) + else: + return int(indexes[0]) + else: + return None + From 3ca88c803eda14fd625105a77531c50bd31e46b1 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Thu, 27 Jul 2023 14:31:49 -0400 Subject: [PATCH 10/14] reorganize info --- peakdet/workflow.py | 31 ++----------------------------- 1 file changed, 2 insertions(+), 29 deletions(-) diff --git a/peakdet/workflow.py b/peakdet/workflow.py index db070b8..d3b0c4c 100644 --- a/peakdet/workflow.py +++ b/peakdet/workflow.py @@ -12,42 +12,15 @@ import pandas as pd from pathlib import Path from peakdet import _version, Physio, save_physio +from peakdet import utils from peakdet.cli.run import _get_parser +from peakdet.utils import find_chtrig from peakdet.blocks import process_signals, manual_peaks -from peakdet.operations import filter_physio, peakfind_physio, interpolate_physio -TRIGGER_NAMES = ["trig", "trigger", "ttl"] - -FUNCTION_MAPPINGS = { - "interpolate_physio": interpolate_physio, - "filter_physio": filter_physio, - "peakfind_physio": peakfind_physio -} LGR = logging.getLogger(__name__) LGR.setLevel(logging.INFO) -def find_chtrig(data): - joint_match = "§".join(TRIGGER_NAMES) - indexes = [] - for n, case in enumerate(data.columns): - name = re.split(r"(\W+|\d|_|\s)", case) - name = list(filter(None, name)) - if re.search("|".join(name), joint_match, re.IGNORECASE): - indexes = indexes + [n] - - if indexes: - if len(indexes) > 1: - raise Exception( - "More than one possible trigger channel was automatically found. " - "Please run phys2bids specifying the -chtrig argument." - ) - else: - return int(indexes[0]) - else: - return None - - def save_bash_call(fname, outdir, outname): From 6c3a152533e26271932d204a65d33f5a816d3dc0 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Sun, 3 Dec 2023 21:51:59 -0500 Subject: [PATCH 11/14] Add config examples --- peakdet/configs/config_ppg_example.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 peakdet/configs/config_ppg_example.json diff --git a/peakdet/configs/config_ppg_example.json b/peakdet/configs/config_ppg_example.json new file mode 100644 index 0000000..5cabcc0 --- /dev/null +++ b/peakdet/configs/config_ppg_example.json @@ -0,0 +1,16 @@ +{ + "PPG": [ + { + "filter_physio": { + "cutoffs": 1.0, + "method": "lowpass" + } + }, + { + "filter_physio": { + "cutoffs": 40, + "method": "highpass" + } + } + ] +} \ No newline at end of file From d4c1447969690e3dd3b373b1bbf19871bdbcd870 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Sun, 3 Dec 2023 21:55:23 -0500 Subject: [PATCH 12/14] change script --- peakdet/cli/run.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/peakdet/cli/run.py b/peakdet/cli/run.py index 72fc06b..c4900d5 100644 --- a/peakdet/cli/run.py +++ b/peakdet/cli/run.py @@ -8,8 +8,6 @@ from gooey import Gooey, GooeyParser import peakdet -TARGET = 'pythonw' if sys.platform == 'darwin' else 'python' -TARGET += ' -u ' + os.path.abspath(__file__) LOADERS = dict( rtpeaks=peakdet.load_rtpeaks, @@ -45,9 +43,8 @@ @Gooey(program_name='Physio pipeline', program_description='Physiological processing pipeline', - default_size=(800, 600), - target=TARGET) -def get_parser(): + default_size=(800, 600)) +def _get_parser(): """ Parser for GUI and command-line arguments """ parser = GooeyParser() parser.add_argument('file_template', metavar='Filename template', 
@@ -224,10 +221,10 @@ def workflow(*, file_template, modality, fs, source='MRI', channel=1, dest.write(','.join([fname] + outputs) + '\n') -def main(): - opts = get_parser().parse_args() - workflow(**vars(opts)) - - if __name__ == '__main__': - main() + raise RuntimeError( + "peakdet/cli/run.py should not be run directly;\n" + "Please `pip install` peakdet and use the " + "`peakdet` command" + ) + From f42400edf90d5cb558036ca1e3375e7035c06db2 Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Tue, 5 Dec 2023 09:34:13 -0500 Subject: [PATCH 13/14] Update style and exclusions --- setup.cfg | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 7e37d42..bbe5320 100644 --- a/setup.cfg +++ b/setup.cfg @@ -45,8 +45,11 @@ doc = sphinx_rtd_theme myst-parser style = - flake8 >=3.7 - flake8-docstrings >=1.5 + flake8 >=4.0 + black <23.0.0 + isort <6.0.0 + pydocstyle + codespell enhgui = Gooey wxpython @@ -71,6 +74,10 @@ exclude= *build/ heuristics tests + _version.py + ./peakdet/tests/* + ./peakdet/cli/__init__.py + versioneer.py ignore = E126, E203, E402, W503 max-line-length = 99 per-file-ignores = @@ -108,6 +115,13 @@ addopts = -rx branch = True omit = peakdet/cli/* + peakdet/tests/* + docs/* + setup.py + versioneer.py + __init__.py + */__init__.py + */*/__init__.py [versioneer] VCS = git style = pep440 From 74cf3e2992a67067cebf84c65954041241bca5bf Mon Sep 17 00:00:00 2001 From: "Marie-Eve Picard (she/her)" <77584086+me-pic@users.noreply.github.com> Date: Tue, 5 Dec 2023 16:43:38 -0500 Subject: [PATCH 14/14] Fix import bugs --- peakdet/utils.py | 7 ------- setup.cfg | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/peakdet/utils.py b/peakdet/utils.py index f3cc996..ef2306a 100644 --- a/peakdet/utils.py +++ b/peakdet/utils.py @@ -9,17 +9,10 @@ import numpy as np import re from peakdet import physio -from peakdet.operations import filter_physio, peakfind_physio, interpolate_physio TRIGGER_NAMES = ["trig", "trigger", "ttl"] -FUNCTION_MAPPINGS = { - "interpolate_physio": interpolate_physio, - "filter_physio": filter_physio, - "peakfind_physio": peakfind_physio -} - def make_operation(*, exclude=None): """ Wrapper to make functions into Physio operations diff --git a/setup.cfg b/setup.cfg index bbe5320..95b1e3b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -37,7 +37,7 @@ include_package_data = True duecredit = duecredit nk = - neurokit + neurokit2 pandas doc = sphinx >=2.0
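
The commits above add neurokit_processing() and the fMRI-oriented ECG cleaning helpers, but the series itself never shows how they are meant to be called. The snippet below is a minimal usage sketch, assuming the signatures introduced in these patches (neurokit_processing(data, modality, method=None, **kwargs) with method_cleaning/method_peaks and the tr/mb/slices scanner parameters forwarded to fmri_ecg_clean()); the file names, sampling rates and parameter values are placeholders, not part of the patches.

# Usage sketch only: paths, sampling rates and scanner parameters are
# placeholders; the calls follow the signatures added in this patch series.
from peakdet import load_physio, save_physio
from peakdet.operations import neurokit_processing

# PPG: a single neurokit2 pipeline name is passed through `method`
ppg = load_physio("sub-01_ppg.txt", fs=1000.0)
ppg_clean = neurokit_processing(ppg, modality="PPG", method="elgendi")

# ECG recorded during fMRI: cleaning and peak-detection methods plus the
# sequence parameters are passed as keyword arguments (tr, mb and slices
# are required by fmri_ecg_clean)
ecg = load_physio("sub-01_ecg.txt", fs=10000.0)
ecg_clean = neurokit_processing(
    ecg,
    modality="ECG",
    method_cleaning="bottenhorn",  # comb band-stop correction for multiband EPI
    method_peaks="neurokit",       # R-peak detector forwarded to nk.ecg_peaks()
    tr=1.49,
    mb=4,
    slices=60,
)

# detected peaks land in the metadata; neurokit2 outputs in the new `features`
print(ecg_clean.peaks[:5])
print(list(ecg_clean.features["info"].keys()))
save_physio("sub-01_ecg_clean.phys", ecg_clean)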
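
Both the biopac and bottenhorn cleaners rest on _comb_band_stop(): MRI gradient artefacts contaminate the ECG at the slice frequency, the volume (1/TR) frequency and their harmonics, and each of those is suppressed with an IIR notch. The standalone sketch below illustrates that idea outside of peakdet; the sampling rate and sequence parameters are placeholders, and only the first few harmonics are filtered here to keep the loop short, whereas the patched helper walks every harmonic up to Nyquist.

# Standalone sketch of the comb band-stop idea behind _comb_band_stop();
# fs, tr, slices, mb and the signal itself are placeholders.
import numpy as np
from scipy import signal

fs = 10000.0                  # sampling rate of the ECG channel
tr, slices, mb, Q = 1.49, 60, 4, 100
nyquist = fs / 2
# fundamental notch frequencies: slice frequency (divided by the multiband
# factor, as in _ecg_clean_bottenhorn) and volume (1/TR) frequency
notches = {"slices": slices / mb / tr, "tr": 1 / tr}   # ~10.07 Hz and ~0.67 Hz

ecg = np.random.randn(int(30 * fs))   # stand-in for a raw ECG segment
for base in notches.values():
    # only the first few harmonics are shown; the real helper iterates up to Nyquist
    for harmonic in range(1, 6):
        w0 = harmonic * base / nyquist           # normalised notch frequency
        b, a = signal.iirnotch(w0, Q)
        ecg = signal.filtfilt(b, a, ecg)         # zero-phase notch at that harmonic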
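
Patches 08 and 11 add a config-driven path (blocks.process_signals() plus JSON files under peakdet/configs/) in which each step is a one-entry dict mapping an operation name to its keyword arguments. The sketch below mirrors that structure; the column names, sampling rate and cutoff values are illustrative, and only interpolate_physio, filter_physio and peakfind_physio are exposed through FUNCTION_MAPPINGS in these commits.

# Sketch of the config-driven processing added in blocks.py; column names,
# sampling rate and cutoffs are illustrative only.
import pandas as pd
from peakdet import Physio
from peakdet.blocks import process_signals

steps = [  # same structure as peakdet/configs/config_ppg_example.json
    {"filter_physio": {"cutoffs": [0.5, 8.0], "method": "bandpass"}},
    {"peakfind_physio": {"thresh": 0.1, "dist": 300}},
]

recording = pd.read_csv("sub-01_physio.tsv.gz", sep="\t",
                        names=["time", "trigger", "PPG"])
ppg = Physio(recording["PPG"], fs=1000.0)

# each step name is looked up in FUNCTION_MAPPINGS and applied with its kwargs
ppg = process_signals(ppg, steps)
print(ppg.peaks[:5])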