Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Allow recompute via _make_file func #1093

Draft
wants to merge 28 commits into
base: master
Choose a base branch
from
Draft
Changes from 1 commit
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
9192573
WIP: remove AnalysisNwbfileLog
CBroz1 Sep 6, 2024
27f0004
WIP: recompute
CBroz1 Sep 6, 2024
743502d
WIP: recompute 2
CBroz1 Sep 11, 2024
9d23949
WIP: recompute 3
CBroz1 Sep 12, 2024
39f07bf
WIP: recompute 4
CBroz1 Sep 12, 2024
1b38818
WIP: recompute 5, electrodes object
CBroz1 Sep 18, 2024
282d553
WIP: recompute 6, add file hash
CBroz1 Sep 19, 2024
94168de
WIP: recompute 7
CBroz1 Sep 20, 2024
b553f77
Merge branch 'master' of https://github.com/LorenFrankLab/spyglass in…
CBroz1 Sep 20, 2024
a594786
✅ : recompute
CBroz1 Sep 20, 2024
df1800e
w
CBroz1 Oct 21, 2024
6d0df07
Handle groups and links
CBroz1 Oct 21, 2024
1587997
Remove debug
CBroz1 Oct 22, 2024
1ed831e
Add directory hasher
CBroz1 Nov 12, 2024
7547fe2
Merge branch 'rcp' of https://github.com/CBroz1/spyglass into rcp
CBroz1 Nov 13, 2024
23799f8
Merge branch 'master' of https://github.com/LorenFrankLab/spyglass in…
CBroz1 Nov 13, 2024
d0011bf
Update directory hasher
CBroz1 Nov 13, 2024
ad7c74a
WIP: update hasher
CBroz1 Jan 8, 2025
558f38b
WIP: fetch upstream, resolve conflicts
CBroz1 Jan 8, 2025
54a3ca1
WIP: error specificity
CBroz1 Jan 9, 2025
1e41698
Add tables for recompute processing
CBroz1 Feb 4, 2025
ae52aed
WIP: incorporate feedback
CBroz1 Feb 21, 2025
0795fa6
WIP: fetch upstream
CBroz1 Mar 3, 2025
2e89070
WIP: enforce environment restriction
CBroz1 Mar 3, 2025
8c172aa
WIP: fetch upstream
CBroz1 Mar 3, 2025
bfe49d1
WIP: typo
CBroz1 Mar 3, 2025
72f8a25
WIP: add tests
CBroz1 Mar 4, 2025
9c27d87
WIP: start add V0 hasher
CBroz1 Mar 5, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
WIP: remove AnalysisNwbfileLog
CBroz1 committed Sep 6, 2024
commit 919257388f05364ef6334331a02bdc7cf6af96fb
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -6,10 +6,17 @@

<!-- Running draft to be removed immediately prior to release. -->

```python
import datajoint as dj

dj.FreeTable(dj.conn(), "common_nwbfile.analysis_nwbfile_log").drop()
```

### Infrastructure

- Disable populate transaction protection for long-populating tables #1066
- Add docstrings to all public methods #1076
- Remove `AnalysisNwbfileLog` #1093

### Pipelines

8 changes: 2 additions & 6 deletions src/spyglass/common/common_ephys.py
Original file line number Diff line number Diff line change
@@ -463,7 +463,7 @@ def make(self, key):
"""
# get the NWB object with the data; FIX: change to fetch with
# additional infrastructure
lfp_file_name = AnalysisNwbfile().create(key["nwb_file_name"]) # logged
lfp_file_name = AnalysisNwbfile().create(key["nwb_file_name"])

rawdata = Raw().nwb_object(key)
sampling_rate, interval_list_name = (Raw() & key).fetch1(
@@ -553,7 +553,6 @@ def make(self, key):
},
replace=True,
)
AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

def nwb_object(self, key):
@@ -748,9 +747,7 @@ def make(self, key):
6. Adds resulting interval list to IntervalList table.
"""
# create the analysis nwb file to store the results.
lfp_band_file_name = AnalysisNwbfile().create( # logged
key["nwb_file_name"]
)
lfp_band_file_name = AnalysisNwbfile().create(key["nwb_file_name"])

# get the NWB object with the lfp data;
# FIX: change to fetch with additional infrastructure
@@ -946,7 +943,6 @@ def make(self, key):
"previously saved lfp band times do not match current times"
)

AnalysisNwbfile().log(lfp_band_file_name, table=self.full_table_name)
self.insert1(key)

def fetch1_dataframe(self, *attrs, **kwargs) -> pd.DataFrame:
108 changes: 3 additions & 105 deletions src/spyglass/common/common_nwbfile.py
Original file line number Diff line number Diff line change
@@ -3,7 +3,6 @@
import stat
import string
from pathlib import Path
from time import time
from uuid import uuid4

import datajoint as dj
@@ -172,8 +171,6 @@ class AnalysisNwbfile(SpyglassMixin, dj.Manual):

# See #630, #664. Excessive key length.

_creation_times = {}

def create(self, nwb_file_name: str) -> str:
"""Open the NWB file, create copy, write to disk and return new name.

@@ -190,9 +187,6 @@ def create(self, nwb_file_name: str) -> str:
analysis_file_name : str
The name of the new NWB file.
"""
# To allow some times to occur before create
# creation_time = self._creation_times.pop("pre_create_time", time())

nwb_file_abspath = Nwbfile.get_abs_path(nwb_file_name)
alter_source_script = False
with pynwb.NWBHDF5IO(
@@ -214,16 +208,19 @@ def create(self, nwb_file_name: str) -> str:
alter_source_script = True

analysis_file_name = self.__get_new_file_name(nwb_file_name)

# write the new file
logger.info(f"Writing new NWB file {analysis_file_name}")
analysis_file_abs_path = AnalysisNwbfile.get_abs_path(
analysis_file_name
)

# export the new NWB file
with pynwb.NWBHDF5IO(
path=analysis_file_abs_path, mode="w", manager=io.manager
) as export_io:
export_io.export(io, nwbf)

if alter_source_script:
self._alter_spyglass_version(analysis_file_abs_path)

@@ -235,8 +232,6 @@ def create(self, nwb_file_name: str) -> str:
permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
os.chmod(analysis_file_abs_path, permissions)

# self._creation_times[analysis_file_name] = creation_time

return analysis_file_name

@staticmethod
@@ -699,100 +694,3 @@ def nightly_cleanup():
# a separate external files clean up required - this is to be done
# during times when no other transactions are in progress.
AnalysisNwbfile.cleanup(True)

def log(self, *args, **kwargs):
"""Null log method. Revert to _disabled_log to turn back on."""
logger.debug("Logging disabled.")

def _disabled_log(self, analysis_file_name, table=None):
"""Passthrough to the AnalysisNwbfileLog table. Avoid new imports."""
if isinstance(analysis_file_name, dict):
analysis_file_name = analysis_file_name["analysis_file_name"]
time_delta = time() - self._creation_times[analysis_file_name]
file_size = Path(self.get_abs_path(analysis_file_name)).stat().st_size

AnalysisNwbfileLog().log(
analysis_file_name=analysis_file_name,
time_delta=time_delta,
file_size=file_size,
table=table,
)

def increment_access(self, *args, **kwargs):
"""Null method. Revert to _disabled_increment_access to turn back on."""
logger.debug("Incrementing access disabled.")

def _disabled_increment_access(self, keys, table=None):
"""Passthrough to the AnalysisNwbfileLog table. Avoid new imports."""
if not isinstance(keys, list):
key = [keys]

for key in keys:
AnalysisNwbfileLog().increment_access(key, table=table)


@schema
class AnalysisNwbfileLog(dj.Manual):
definition = """
id: int auto_increment
---
-> AnalysisNwbfile
dj_user : varchar(64) # user who created the file
timestamp = CURRENT_TIMESTAMP : timestamp # when the file was created
table = null : varchar(64) # creating table
time_delta = null : float # how long it took to create
file_size = null : float # size of the file in bytes
accessed = 0 : int # n times accessed
unique index (analysis_file_name)
"""

def log(
self,
analysis_file_name=None,
time_delta=None,
file_size=None,
table=None,
):
"""Log the creation of an analysis NWB file.

Parameters
----------
analysis_file_name : str
The name of the analysis NWB file.
"""

self.insert1(
{
"dj_user": dj.config["database.user"],
"analysis_file_name": analysis_file_name,
"time_delta": time_delta,
"file_size": file_size,
"table": table[:64],
}
)

def increment_access(self, key, table=None):
"""Increment the accessed field for the given analysis file name.

Parameters
----------
key : Union[str, dict]
The name of the analysis NWB file, or a key to the table.
table : str, optional
The table that created the file.
"""
if isinstance(key, str):
key = {"analysis_file_name": key}

if not (query := self & key):
self.log(**key, table=table)
entries = query.fetch(as_dict=True)

inserts = []
for entry in entries:
entry["accessed"] += 1
if table and not entry.get("table"):
entry["table"] = table
inserts.append(entry)

self.insert(inserts, replace=True)
6 changes: 1 addition & 5 deletions src/spyglass/common/common_position.py
Original file line number Diff line number Diff line change
@@ -88,9 +88,7 @@ def make(self, key):
"""Insert smoothed head position, orientation and velocity."""
logger.info(f"Computing position for: {key}")

analysis_file_name = AnalysisNwbfile().create( # logged
key["nwb_file_name"]
)
analysis_file_name = AnalysisNwbfile().create(key["nwb_file_name"])

raw_position = RawPosition.PosObject & key
spatial_series = raw_position.fetch_nwb()[0]["raw_position"]
@@ -117,8 +115,6 @@ def make(self, key):

AnalysisNwbfile().add(key["nwb_file_name"], analysis_file_name)

AnalysisNwbfile().log(key, table=self.full_table_name)

self.insert1(key)

@staticmethod
3 changes: 1 addition & 2 deletions src/spyglass/decoding/v0/clusterless.py
Original file line number Diff line number Diff line change
@@ -159,7 +159,7 @@ def make(self, key):
4. Saves the marks as a TimeSeries object in a new AnalysisNwbfile.
"""
# create a new AnalysisNwbfile and a timeseries for the marks and save
key["analysis_file_name"] = AnalysisNwbfile().create( # logged
key["analysis_file_name"] = AnalysisNwbfile().create(
key["nwb_file_name"]
)
# get the list of mark parameters
@@ -246,7 +246,6 @@ def make(self, key):
key["analysis_file_name"], nwb_object
)
AnalysisNwbfile().add(key["nwb_file_name"], key["analysis_file_name"])
AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

def fetch1_dataframe(self) -> pd.DataFrame:
3 changes: 0 additions & 3 deletions src/spyglass/decoding/v1/waveform_features.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os
from itertools import chain
from time import time

import datajoint as dj
import numpy as np
@@ -108,7 +107,6 @@ class UnitWaveformFeatures(SpyglassMixin, dj.Computed):

def make(self, key):
"""Populate UnitWaveformFeatures table."""
AnalysisNwbfile()._creation_times["pre_create_time"] = time()
# get the list of feature parameters
params = (WaveformFeaturesParams & key).fetch1("params")

@@ -175,7 +173,6 @@ def make(self, key):
nwb_file_name,
key["analysis_file_name"],
)
AnalysisNwbfile().log(key, table=self.full_table_name)

self.insert1(key)

5 changes: 1 addition & 4 deletions src/spyglass/lfp/analysis/v1/lfp_band.py
Original file line number Diff line number Diff line change
@@ -175,9 +175,7 @@ class LFPBandV1(SpyglassMixin, dj.Computed):
def make(self, key):
"""Populate LFPBandV1"""
# create the analysis nwb file to store the results.
lfp_band_file_name = AnalysisNwbfile().create( # logged
key["nwb_file_name"]
)
lfp_band_file_name = AnalysisNwbfile().create(key["nwb_file_name"])
# get the NWB object with the lfp data;
# FIX: change to fetch with additional infrastructure
lfp_key = {"merge_id": key["lfp_merge_id"]}
@@ -368,7 +366,6 @@ def make(self, key):
"previously saved lfp band times do not match current times"
)

AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

def fetch1_dataframe(self, *attrs, **kwargs):
3 changes: 1 addition & 2 deletions src/spyglass/lfp/v1/lfp.py
Original file line number Diff line number Diff line change
@@ -66,7 +66,7 @@ def make(self, key):
the AnalysisNwbfile table. The valid times for the filtered data are
stored in the IntervalList table.
"""
lfp_file_name = AnalysisNwbfile().create(key["nwb_file_name"]) # logged
lfp_file_name = AnalysisNwbfile().create(key["nwb_file_name"])
# get the NWB object with the data
nwbf_key = {"nwb_file_name": key["nwb_file_name"]}
rawdata = (Raw & nwbf_key).fetch_nwb()[0]["raw"]
@@ -201,7 +201,6 @@ def make(self, key):
orig_key["analysis_file_name"] = lfp_file_name
orig_key["lfp_object_id"] = lfp_object_id
LFPOutput.insert1(orig_key)
AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self, *attrs, **kwargs) -> pd.DataFrame:
"""Fetch a single dataframe."""
4 changes: 1 addition & 3 deletions src/spyglass/linearization/v0/main.py
Original file line number Diff line number Diff line change
@@ -126,7 +126,7 @@ def make(self, key):
"""Compute linearized position for a given key."""
logger.info(f"Computing linear position for: {key}")

key["analysis_file_name"] = AnalysisNwbfile().create( # logged
key["analysis_file_name"] = AnalysisNwbfile().create(
key["nwb_file_name"]
)

@@ -189,8 +189,6 @@ def make(self, key):

self.insert1(key)

AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self) -> DataFrame:
"""Fetch a single dataframe"""
return self.fetch_nwb()[0]["linearized_position"].set_index("time")
4 changes: 1 addition & 3 deletions src/spyglass/linearization/v1/main.py
Original file line number Diff line number Diff line change
@@ -134,7 +134,7 @@ def make(self, key):
position_nwb = PositionOutput().fetch_nwb(
{"merge_id": key["pos_merge_id"]}
)[0]
key["analysis_file_name"] = AnalysisNwbfile().create( # logged
key["analysis_file_name"] = AnalysisNwbfile().create(
position_nwb["nwb_file_name"]
)
position = np.asarray(
@@ -195,8 +195,6 @@ def make(self, key):
[orig_key], part_name=part_name, skip_duplicates=True
)

AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self) -> DataFrame:
"""Fetch a single dataframe."""
return self.fetch_nwb()[0]["linearized_position"].set_index("time")
6 changes: 1 addition & 5 deletions src/spyglass/position/v1/position_dlc_orient.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from time import time

import datajoint as dj
import numpy as np
import pandas as pd
@@ -123,7 +121,6 @@ def make(self, key):
4. Insert the key into the DLCOrientation table.
"""
# Get labels to smooth from Parameters table
AnalysisNwbfile()._creation_times["pre_create_time"] = time()
pos_df = self._get_pos_df(key)

params = (DLCOrientationParams() & key).fetch1("params")
@@ -162,7 +159,7 @@ def make(self, key):
final_df = pd.DataFrame(
orientation, columns=["orientation"], index=pos_df.index
)
key["analysis_file_name"] = AnalysisNwbfile().create( # logged
key["analysis_file_name"] = AnalysisNwbfile().create(
key["nwb_file_name"]
)
# if spatial series exists, get metadata from there
@@ -192,7 +189,6 @@ def make(self, key):
)

self.insert1(key)
AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self) -> pd.DataFrame:
"""Fetch a single dataframe"""
1 change: 0 additions & 1 deletion src/spyglass/position/v1/position_dlc_pose_estimation.py
Original file line number Diff line number Diff line change
@@ -347,7 +347,6 @@ def _logged_make(self, key):
analysis_file_name=key["analysis_file_name"],
)
self.BodyPart.insert1(key)
AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch_dataframe(self, *attrs, **kwargs) -> pd.DataFrame:
"""Fetch a concatenated dataframe of all bodyparts."""
1 change: 0 additions & 1 deletion src/spyglass/position/v1/position_dlc_position.py
Original file line number Diff line number Diff line change
@@ -287,7 +287,6 @@ def _logged_make(self, key):
analysis_file_name=key["analysis_file_name"],
)
self.insert1(key)
AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self) -> pd.DataFrame:
"""Fetch a single dataframe."""
3 changes: 0 additions & 3 deletions src/spyglass/position/v1/position_dlc_selection.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import copy
from pathlib import Path
from time import time

import datajoint as dj
import numpy as np
@@ -67,7 +66,6 @@ def make(self, key):
"""
orig_key = copy.deepcopy(key)
# Add to Analysis NWB file
AnalysisNwbfile()._creation_times["pre_create_time"] = time()
key["pose_eval_result"] = self.evaluate_pose_estimation(key)

pos_nwb = (DLCCentroid & key).fetch_nwb()[0]
@@ -155,7 +153,6 @@ def make(self, key):
part_name=to_camel_case(self.table_name.split("__")[-1]),
skip_duplicates=True,
)
AnalysisNwbfile().log(key, table=self.full_table_name)

def fetch1_dataframe(self) -> pd.DataFrame:
"""Return the position data as a DataFrame."""
5 changes: 1 addition & 4 deletions src/spyglass/position/v1/position_trodes_position.py
Original file line number Diff line number Diff line change
@@ -174,9 +174,7 @@ def make(self, key):
logger.info(f"Computing position for: {key}")
orig_key = copy.deepcopy(key)

analysis_file_name = AnalysisNwbfile().create( # logged
key["nwb_file_name"]
)
analysis_file_name = AnalysisNwbfile().create(key["nwb_file_name"])

raw_position = RawPosition.PosObject & key
spatial_series = raw_position.fetch_nwb()[0]["raw_position"]
@@ -218,7 +216,6 @@ def make(self, key):
PositionOutput._merge_insert(
[orig_key], part_name=part_name, skip_duplicates=True
)
AnalysisNwbfile().log(key, table=self.full_table_name)

@staticmethod
def generate_pos_components(*args, **kwargs):
10 changes: 2 additions & 8 deletions src/spyglass/spikesorting/v0/spikesorting_curation.py
Original file line number Diff line number Diff line change
@@ -341,7 +341,7 @@ def make(self, key):
3. Generates an analysis NWB file with the waveforms
4. Inserts the key into Waveforms table
"""
key["analysis_file_name"] = AnalysisNwbfile().create( # logged
key["analysis_file_name"] = AnalysisNwbfile().create(
key["nwb_file_name"]
)
recording = Curation.get_recording(key)
@@ -375,7 +375,6 @@ def make(self, key):
key["waveforms_object_id"] = object_id
AnalysisNwbfile().add(key["nwb_file_name"], key["analysis_file_name"])

AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

def load_waveforms(self, key: dict):
@@ -541,9 +540,7 @@ def make(self, key):
3. Generates an analysis NWB file with the metrics.
4. Inserts the key into QualityMetrics table
"""
analysis_file_name = AnalysisNwbfile().create( # logged
key["nwb_file_name"]
)
analysis_file_name = AnalysisNwbfile().create(key["nwb_file_name"])
waveform_extractor = Waveforms().load_waveforms(key)
key["analysis_file_name"] = (
analysis_file_name # add to key here to prevent fetch errors
@@ -567,7 +564,6 @@ def make(self, key):
key["analysis_file_name"], metrics=qm
)
AnalysisNwbfile().add(key["nwb_file_name"], key["analysis_file_name"])
AnalysisNwbfile().log(key, table=self.full_table_name)

self.insert1(key)

@@ -980,7 +976,6 @@ def make(self, key):
2. Saves the sorting in an analysis NWB file
3. Inserts key into CuratedSpikeSorting table and units into part table.
"""
AnalysisNwbfile()._creation_times["pre_create_time"] = time.time()
unit_labels_to_remove = ["reject"]
# check that the Curation has metrics
metrics = (Curation & key).fetch1("quality_metrics")
@@ -1051,7 +1046,6 @@ def make(self, key):
labels=labels,
)

AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

# now add the units
4 changes: 0 additions & 4 deletions src/spyglass/spikesorting/v1/curation.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from time import time
from typing import Dict, List, Union

import datajoint as dj
@@ -80,8 +79,6 @@ def insert_curation(
-------
curation_key : dict
"""
AnalysisNwbfile()._creation_times["pre_create_time"] = time()

sort_query = cls & {"sorting_id": sorting_id}
parent_curation_id = max(parent_curation_id, -1)
if parent_curation_id == -1:
@@ -124,7 +121,6 @@ def insert_curation(
"description": description,
}
cls.insert1(key, skip_duplicates=True)
AnalysisNwbfile().log(analysis_file_name, table=cls.full_table_name)

return key

4 changes: 0 additions & 4 deletions src/spyglass/spikesorting/v1/metric_curation.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os
import uuid
from time import time
from typing import Any, Dict, List, Union

import datajoint as dj
@@ -226,8 +225,6 @@ def make(self, key):
7. Saves the waveforms, metrics, labels, and merge groups to an
analysis NWB file and inserts into MetricCuration table.
"""

AnalysisNwbfile()._creation_times["pre_create_time"] = time()
# FETCH
nwb_file_name = (
SpikeSortingSelection * MetricCurationSelection & key
@@ -301,7 +298,6 @@ def make(self, key):
nwb_file_name,
key["analysis_file_name"],
)
AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key)

@classmethod
23 changes: 8 additions & 15 deletions src/spyglass/spikesorting/v1/recording.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import uuid
from time import time
from typing import Iterable, List, Optional, Tuple, Union

import datajoint as dj
@@ -182,17 +181,18 @@ def make(self, key):
- NWB file to AnalysisNwbfile
- Recording ids to SpikeSortingRecording
"""
AnalysisNwbfile()._creation_times["pre_create_time"] = time()
nwb_file_name = (SpikeSortingRecordingSelection & key).fetch1(
"nwb_file_name"
)

# DO:
# - get valid times for sort interval
# - preprocess recording
# - write recording to NWB file
sort_interval_valid_times = self._get_sort_interval_valid_times(key)
recording, timestamps = self._get_preprocessed_recording(key)
recording_nwb_file_name, recording_object_id = _write_recording_to_nwb(
recording,
timestamps,
(SpikeSortingRecordingSelection & key).fetch1("nwb_file_name"),
recording, timestamps, nwb_file_name
)
key["analysis_file_name"] = recording_nwb_file_name
key["object_id"] = recording_object_id
@@ -203,21 +203,13 @@ def make(self, key):
# - entry into SpikeSortingRecording
IntervalList.insert1(
{
"nwb_file_name": (SpikeSortingRecordingSelection & key).fetch1(
"nwb_file_name"
),
"nwb_file_name": nwb_file_name,
"interval_list_name": key["recording_id"],
"valid_times": sort_interval_valid_times,
"pipeline": "spikesorting_recording_v1",
}
)
AnalysisNwbfile().add(
(SpikeSortingRecordingSelection & key).fetch1("nwb_file_name"),
key["analysis_file_name"],
)
AnalysisNwbfile().log(
recording_nwb_file_name, table=self.full_table_name
)
AnalysisNwbfile().add(nwb_file_name, key["analysis_file_name"])
self.insert1(key)

@classmethod
@@ -538,6 +530,7 @@ def _write_recording_to_nwb(

analysis_nwb_file = AnalysisNwbfile().create(nwb_file_name)
analysis_nwb_file_abs_path = AnalysisNwbfile.get_abs_path(analysis_nwb_file)

with pynwb.NWBHDF5IO(
path=analysis_nwb_file_abs_path,
mode="a",
2 changes: 0 additions & 2 deletions src/spyglass/spikesorting/v1/sorting.py
Original file line number Diff line number Diff line change
@@ -155,7 +155,6 @@ def make(self, key: dict):
# - information about the recording
# - artifact free intervals
# - spike sorter and sorter params
AnalysisNwbfile()._creation_times["pre_create_time"] = time.time()

recording_key = (
SpikeSortingRecording * SpikeSortingSelection & key
@@ -301,7 +300,6 @@ def make(self, key: dict):
(SpikeSortingSelection & key).fetch1("nwb_file_name"),
key["analysis_file_name"],
)
AnalysisNwbfile().log(key, table=self.full_table_name)
self.insert1(key, skip_duplicates=True)

@classmethod