Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/LorenFrankLab/spyglass in…
Browse files Browse the repository at this point in the history
…to 1144
  • Loading branch information
CBroz1 committed Oct 24, 2024
2 parents 44d0cb4 + e57638e commit 2253ef9
Show file tree
Hide file tree
Showing 7 changed files with 72 additions and 38 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/test-conda.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: Tests

on:
push:
branches:
branches:
- '!test_branch'
- '!documentation'
schedule: # once a day at midnight UTC
Expand Down Expand Up @@ -53,16 +53,16 @@ jobs:
sudo apt-get install mysql-client libmysqlclient-dev libgirepository1.0-dev -y
sudo apt-get install ffmpeg libsm6 libxext6 -y # non-dlc position deps
- name: Run pip install for test deps
run: |
run: |
pip install --quiet .[test]
- name: Download data
env:
BASEURL: ftps://ftp.box.com/trodes_to_nwb_test_data/
NWBFILE: minirec20230622.nwb # Relative to Base URL
VID_ONE: 20230622_sample_01_a1/20230622_sample_01_a1.1.h264
VID_TWO: 20230622_sample_02_a1/20230622_sample_02_a1.1.h264
RAW_DIR: /home/runner/work/spyglass/spyglass/tests/_data/raw/
VID_DIR: /home/runner/work/spyglass/spyglass/tests/_data/video/
RAW_DIR: /home/runner/work/spyglass/spyglass/tests/_data/raw/
VID_DIR: /home/runner/work/spyglass/spyglass/tests/_data/video/
run: |
mkdir -p $RAW_DIR $VID_DIR
wget_opts() { # Declare func with download options
Expand All @@ -76,4 +76,4 @@ jobs:
wget_opts $VID_DIR $VID_TWO
- name: Run tests
run: |
pytest --no-docker --no-dlc
pytest --no-docker --no-dlc
10 changes: 6 additions & 4 deletions .github/workflows/test-package-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ jobs:
python-version: 3.9
- run: |
pip install --upgrade build twine
pip install importlib_metadata==7.2.1 # twine #977
- name: Build sdist and wheel
run: python -m build
- run: twine check dist/*
Expand All @@ -50,6 +49,7 @@ jobs:
needs: [build]
strategy:
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12']
package: ['wheel', 'sdist', 'archive']
steps:
- name: Download sdist and wheel artifacts
Expand All @@ -66,11 +66,13 @@ jobs:
path: archive/
- uses: actions/setup-python@v5
with:
python-version: 3.9
python-version: ${{ matrix.python-version }}
- name: Display Python version
run: python -c "import sys; print(sys.version)"
- name: Update pip
run: pip install --upgrade pip
- name: Install build dependencies
run: |
pip install --upgrade setuptools wheel
pip install --upgrade pip
- name: Install wheel
if: matrix.package == 'wheel'
run: pip install dist/*.whl
Expand Down
12 changes: 9 additions & 3 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,16 +21,22 @@ dj.FreeTable(dj.conn(), "common_session.session_group").drop()
- Add docstrings to all public methods #1076
- Update DataJoint to 0.14.2 #1081
- Allow restriction based on parent keys in `Merge.fetch_nwb()` #1086, #1126
- Import `datajoint.dependencies.unite_master_parts` -> `topo_sort` #1116, #1137
- Import `datajoint.dependencies.unite_master_parts` -> `topo_sort` #1116,
#1137, #1162
- Fix bool settings imported from dj config file #1117
- Allow definition of tasks and new probe entries from config #1074, #1120
- Enforce match between ingested nwb probe geometry and existing table entry
#1074
- Update DataJoint install and password instructions #1131
- Fix dandi upload process for nwb's with video or linked objects #1095, #1151
- Minor docs fixes #1145
- Remove stored hashes from pytests #1152
- Remove mambaforge from tests #1153
- Test fixes
- Remove stored hashes from pytests #1152
- Remove mambaforge from tests #1153
- Remove debug statement #1164
- Add testing for python versions 3.9, 3.10, 3.11, 3.12 #1169
- Allow python \< 3.13 #1169
- Remove numpy version restriction #1169
- Merge table delete removes orphaned master entries #1164

### Pipelines
Expand Down
4 changes: 2 additions & 2 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ dependencies:
# - libgcc # dlc-only
- matplotlib
- non_local_detector
- numpy<1.24
- numpy
- pip
- position_tools
- pybind11 # req by mountainsort4 -> isosplit5
- pydotplus
- pyfftw<=0.12.0 # ghostipy req. install from conda-forge for Mac ARM
- python>=3.9,<3.10
- python>=3.9,<3.13
- pytorch<1.12.0
- ripple_detection
- seaborn
Expand Down
4 changes: 2 additions & 2 deletions environment_dlc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ dependencies:
- libgcc # dlc-only
- matplotlib
- non_local_detector
- numpy<1.24
- numpy
- pip>=20.2.*
- position_tools
- pybind11 # req by mountainsort4 -> isosplit5
- pydotplus>=2.0.*
- pyfftw<=0.12.0 # ghostipy req. install from conda-forge for Mac ARM
- python>=3.9,<3.10
- python>=3.9,<3.13
- pytorch<1.12.0
- ripple_detection
- seaborn
Expand Down
36 changes: 18 additions & 18 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ build-backend = "hatchling.build"
name = "spyglass-neuro"
description = "Neuroscience data analysis framework for reproducible research"
readme = "README.md"
requires-python = ">=3.9,<3.10"
requires-python = ">=3.9,<3.13"
license = { file = "LICENSE" }
authors = [
{ name = "Loren Frank", email = "[email protected]" },
Expand Down Expand Up @@ -46,11 +46,11 @@ dependencies = [
"matplotlib",
"ndx_franklab_novela>=0.1.0",
"non_local_detector",
"numpy<1.24",
"numpy",
"opencv-python",
"panel>=1.4.0", # panel #6325 resolved
"panel>=1.4.0", # panel #6325 resolved
"position_tools>=0.1.0",
"pubnub<6.4.0", # TODO: remove this when sortingview is updated
"pubnub<6.4.0", # TODO: remove this when sortingview is updated
"pydotplus",
"pynwb>=2.2.0,<3",
"ripple_detection",
Expand All @@ -62,21 +62,21 @@ dependencies = [

[project.optional-dependencies]
dlc = [
"ffmpeg",
"deeplabcut[tf]", # removing dlc pin removes need to pin tf/numba
"ffmpeg",
"deeplabcut[tf]", # removing dlc pin removes need to pin tf/numba
]
test = [
"click", # for CLI subpackage only
"docker", # for tests in a container
"click", # for CLI subpackage only
"docker", # for tests in a container
"ghostipy",
"kachery", # database access
"kachery", # database access
"kachery-client",
"kachery-cloud>=0.4.0",
"opencv-python-headless", # for headless testing of Qt
"pre-commit", # linting
"pytest", # unit testing
"pytest-cov", # code coverage
"pytest-xvfb", # for headless testing of Qt
"pre-commit", # linting
"pytest", # unit testing
"pytest-cov", # code coverage
"pytest-xvfb", # for headless testing of Qt
]
docs = [
"hatch", # Get version from env
Expand Down Expand Up @@ -134,7 +134,7 @@ addopts = [
# "--no-dlc", # don't run DLC tests
"--show-capture=no",
"--pdbcls=IPython.terminal.debugger:TerminalPdb", # use ipython debugger
"--doctest-modules", # run doctests in all modules
"--doctest-modules", # run doctests in all modules
"--cov=spyglass",
"--cov-report=term-missing",
"--no-cov-on-fail",
Expand All @@ -143,9 +143,9 @@ testpaths = ["tests"]
log_level = "INFO"
env = [
"QT_QPA_PLATFORM = offscreen", # QT fails headless without this
"DISPLAY = :0", # QT fails headless without this
"TF_ENABLE_ONEDNN_OPTS = 0", # TF disable approx calcs
"TF_CPP_MIN_LOG_LEVEL = 2", # Disable TF warnings
"DISPLAY = :0", # QT fails headless without this
"TF_ENABLE_ONEDNN_OPTS = 0", # TF disable approx calcs
"TF_CPP_MIN_LOG_LEVEL = 2", # Disable TF warnings
]

[tool.coverage.run]
Expand Down Expand Up @@ -175,4 +175,4 @@ omit = [ # which submodules have no tests
line-length = 80

[tool.ruff.lint]
ignore = ["F401" , "E402", "E501"]
ignore = ["F401", "E402", "E501"]
34 changes: 30 additions & 4 deletions src/spyglass/utils/dj_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from datajoint.user_tables import TableMeta
from datajoint.utils import get_master, to_camel_case
from networkx import (
DiGraph,
NetworkXNoPath,
NodeNotFound,
all_simple_paths,
Expand All @@ -33,10 +34,35 @@
unique_dicts,
)

try: # Datajoint 0.14.2+ uses topo_sort instead of unite_master_parts
from datajoint.dependencies import topo_sort as dj_topo_sort
except ImportError:
from datajoint.dependencies import unite_master_parts as dj_topo_sort

def dj_topo_sort(graph: DiGraph) -> List[str]:
    """Return the graph's table names in topological order.

    Prefers datajoint's ``topo_sort`` (introduced in DataJoint 0.14.2);
    on older datajoint versions, falls back to networkx's
    ``topological_sort`` combined with datajoint's ``unite_master_parts``.

    NOTE: This ordering will impact _hash_upstream, but usage should be
    consistent before/after a no-transaction populate.

    Parameters
    ----------
    graph : nx.DiGraph
        Directed graph to sort

    Returns
    -------
    List[str]
        List of table names in topological order
    """
    try:  # DataJoint 0.14.2+ ships topo_sort directly
        from datajoint.dependencies import topo_sort

        return topo_sort(graph)
    except ImportError:  # older DataJoint: emulate via networkx + unite
        from datajoint.dependencies import unite_master_parts
        from networkx.algorithms.dag import topological_sort

        ordered = list(topological_sort(graph))
        return unite_master_parts(ordered)


class Direction(Enum):
Expand Down

0 comments on commit 2253ef9

Please sign in to comment.