Skip to content

Commit

Permalink
Move towards pyproject.toml standard (#955)
Browse files Browse the repository at this point in the history
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Cody Baker <[email protected]>
  • Loading branch information
3 people authored Jul 31, 2024
1 parent 2d65394 commit 359195a
Show file tree
Hide file tree
Showing 8 changed files with 110 additions and 99 deletions.
1 change: 0 additions & 1 deletion .readthedocs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ sphinx:
# Python requirements required to build your docs
python:
install:
- requirements: requirements-minimal.txt
- method: pip
path: .
extra_requirements:
Expand Down
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
### Improvements

* The `OpenEphysBinaryRecordingInterface` now uses `lxml` for extracting the session start time from the settings.xml file and does not depend on `pyopenephys` anymore. [PR #971](https://github.com/catalystneuro/neuroconv/pull/971)
* Swap the majority of package setup and build steps to `pyproject.toml` instead of `setup.py`. [PR #955](https://github.com/catalystneuro/neuroconv/pull/955)
* The `DeeplabcutInterface` now skips inferring timestamps from movie when timestamps are specified, running faster. [PR #967](https://github.com/catalystneuro/neuroconv/pull/967)


Expand Down
87 changes: 87 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,3 +1,90 @@
[build-system]
# PEP 621 [project] metadata is only understood by setuptools >= 61.0;
# the previous floor (>=52) would not read this table at all.
# "wheel" is intentionally NOT listed: setuptools provisions it itself
# when building wheels, and listing it is deprecated by setuptools docs.
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "neuroconv"
version = "0.5.1"
description = "Convert data from proprietary formats to NWB format."
readme = "README.md"
authors = [
    {name = "Cody Baker"},
    {name = "Szonja Weigl"},
    {name = "Heberto Mayorquin"},
    {name = "Luiz Tauffer"},
    {name = "Ben Dichter", email = "[email protected]"}
]
urls = { "Homepage" = "https://github.com/catalystneuro/neuroconv" }
license = {file = "license.txt"}
keywords = ["nwb"]
classifiers = [
    "Intended Audience :: Science/Research",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Operating System :: POSIX :: Linux",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: MacOS",
    # Must be an exact trove classifier; "License :: BSD-3-Clause" is not
    # in the official PyPI classifier list and would be rejected on upload.
    "License :: OSI Approved :: BSD License",
]
requires-python = ">=3.9"
dependencies = [
    # numpy floors split by interpreter: 1.26 is the first release with
    # Python 3.12 wheels.
    "numpy>=1.22.0; python_version <= '3.11'",
    "numpy>=1.26.0; python_version >= '3.12'",
    "jsonschema>=3.2.0",
    "PyYAML>=5.4",
    "scipy>=1.4.1",
    "h5py>=3.9.0",
    "hdmf>=3.13.0",
    "hdmf_zarr>=0.7.0",
    "pynwb>=2.7.0",
    "pydantic>=2.0.0",
    "typing_extensions>=4.1.0",
    "psutil>=5.8.0",
    "tqdm>=4.60.0",
    "pandas",
    "parse>=1.20.0",
    "click",
    "docstring-parser",
    "packaging" # Issue 903
]


[project.optional-dependencies]
test = [
    "pytest",
    "pytest-cov",
    "ndx-events>=0.2.0", # for special tests to ensure load_namespaces is set to allow NWBFile load at all times
    "parameterized>=0.8.1",
    "ndx-miniscope",
    "spikeinterface[qualitymetrics]>=0.100.0",
    "zarr<2.18.0", # Error with Blosc (read-only during decode) in numcodecs on May 7; check later if resolved
    "pytest-xdist"
]

docs = [
    "Jinja2<3.1",
    "Sphinx==5.1.1",
    "sphinx_rtd_theme==1.0.0",
    "readthedocs-sphinx-search==0.1.2",
    "sphinx-toggleprompt==0.2.0",
    "sphinx-copybutton==0.5.0",
    "roiextractors", # Needed for the API documentation
    "spikeinterface", # Needed for the API documentation
    "pydata_sphinx_theme==0.12.0"
]
dandi = ["dandi>=0.58.1"]
compressors = ["hdf5plugin"]

[tool.setuptools.packages.find]
where = ["src"]


[project.scripts]
neuroconv = "neuroconv.tools.yaml_conversion_specification._yaml_conversion_specification:run_conversion_from_yaml_cli"


[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra --doctest-glob='*.rst'"
Expand Down
18 changes: 0 additions & 18 deletions requirements-minimal.txt

This file was deleted.

9 changes: 0 additions & 9 deletions requirements-rtd.txt

This file was deleted.

8 changes: 0 additions & 8 deletions requirements-testing.txt

This file was deleted.

83 changes: 21 additions & 62 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,93 +4,52 @@
from pathlib import Path
from shutil import copy

from setuptools import find_packages, setup
from setuptools import setup

root = Path(__file__).parent

with open(root / "README.md") as f:
long_description = f.read()
with open(root / "requirements-minimal.txt") as f:
install_requires = f.readlines()
with open(root / "requirements-rtd.txt") as f:
documentation_dependencies = f.readlines()
with open(root / "requirements-testing.txt") as f:
testing_suite_dependencies = f.readlines()

extras_require = defaultdict(list)
def read_requirements(file):
with open(root / file) as f:
return f.readlines()


extras_require["dandi"].append("dandi>=0.58.1")
extras_require["full"].extend(extras_require["dandi"])
extras_require = defaultdict(list)
extras_require["full"] = ["dandi>=0.58.1", "hdf5plugin"]

extras_require.update(compressors=["hdf5plugin"])
extras_require["full"].extend(["hdf5plugin"])

extras_require.update(test=testing_suite_dependencies, docs=documentation_dependencies)
for modality in ["ophys", "ecephys", "icephys", "behavior", "text"]:
modality_path = root / "src" / "neuroconv" / "datainterfaces" / modality
modality_requirement_file = modality_path / "requirements.txt"
if modality_requirement_file.exists():
with open(modality_requirement_file) as f:
modality_requirements = f.readlines()
extras_require["full"].extend(modality_requirements)
extras_require[modality].extend(modality_requirements)
modality_requirements = read_requirements(modality_requirement_file)
extras_require["full"].extend(modality_requirements)
extras_require[modality] = modality_requirements
else:
modality_requirements = list()
modality_requirements = []

format_subpaths = [path for path in modality_path.iterdir() if path.is_dir() and path.name != "__pycache__"]
for format_subpath in format_subpaths:
format_requirement_file = format_subpath / "requirements.txt"
extras_require[format_subpath.name].extend(modality_requirements)
extras_require[format_subpath.name] = modality_requirements.copy()
if format_requirement_file.exists():
with open(format_requirement_file) as f:
format_requirements = f.readlines()
extras_require["full"].extend(format_requirements)
extras_require[modality].extend(format_requirements)
extras_require[format_subpath.name].extend(format_requirements)
format_requirements = read_requirements(format_requirement_file)
extras_require["full"].extend(format_requirements)
extras_require[modality].extend(format_requirements)
extras_require[format_subpath.name].extend(format_requirements)

# Create a local copy for the gin test configuration file based on the master file `base_gin_test_config.json`
gin_config_file_base = Path("./base_gin_test_config.json")
gin_config_file_local = Path("./tests/test_on_data/gin_test_config.json")
gin_config_file_base = root / "base_gin_test_config.json"
gin_config_file_local = root / "tests/test_on_data/gin_test_config.json"
if not gin_config_file_local.exists():
copy(src=gin_config_file_base, dst=gin_config_file_local)

# Bug related to sonpy on M1 Mac being installed but not running properly
if sys.platform == "darwin" and platform.processor() == "arm":
extras_require.pop("spike2")

extras_require["ecephys"].remove(
next(requirement for requirement in extras_require["ecephys"] if "sonpy" in requirement)
)
extras_require["full"].remove(next(requirement for requirement in extras_require["full"] if "sonpy" in requirement))
extras_require.pop("spike2", None)
extras_require["ecephys"] = [req for req in extras_require["ecephys"] if "sonpy" not in req]
extras_require["full"] = [req for req in extras_require["full"] if "sonpy" not in req]

setup(
name="neuroconv",
version="0.5.1",
description="Convert data from proprietary formats to NWB format.",
long_description=long_description,
long_description_content_type="text/markdown",
author="Cody Baker, Szonja Weigl, Heberto Mayorquin, Luiz Tauffer, and Ben Dichter.",
author_email="[email protected]",
url="https://github.com/catalystneuro/neuroconv",
keywords="nwb",
license_files=("license.txt",),
packages=find_packages(where="src"),
package_dir={"": "src"},
include_package_data=True, # Includes files described in MANIFEST.in in the installation.
python_requires=">=3.9",
install_requires=install_requires,
extras_require=extras_require,
entry_points={
"console_scripts": [
"neuroconv = neuroconv.tools.yaml_conversion_specification._yaml_conversion_specification:run_conversion_from_yaml_cli",
],
},
license="BSD-3-Clause",
classifiers=[
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
],
)
2 changes: 1 addition & 1 deletion tests/test_on_data/test_metadata/test_maxwell_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
from platform import system
from shutil import rmtree
from tempfile import mkdtemp
from zoneinfo import ZoneInfo

import pytest
from hdmf.testing import TestCase
from zoneinfo import ZoneInfo

from neuroconv.datainterfaces import MaxOneRecordingInterface

Expand Down

0 comments on commit 359195a

Please sign in to comment.