Do specify our copyright in LICENSE file, unify name with dandiarchive.org, make year auto update in docs #6123
Workflow file for this run

name: Tests
on:
  push:
    branches:
      - master
  pull_request:
  schedule:
    - cron: '0 6 * * *'
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name }}
  cancel-in-progress: true
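# Note: with this group key, a newer run for the same workflow, event type,
# and ref cancels any run of the group that is still in progress.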
defaults:
  run:
    shell: bash
jobs:
  test:
    runs-on: ${{ matrix.os }}
    env:
      NO_ET: 1
      DANDI_ALLOW_LOCALHOST_URLS: "1"
      DANDI_PAGINATION_DISABLE_FALLBACK: "1"
      DANDI_TESTS_PERSIST_DOCKER_COMPOSE: "1"
    strategy:
      fail-fast: false
      matrix:
        os:
          - windows-2019
          - ubuntu-latest
          - macos-latest
        python:
          - 3.9
          - '3.10'  # Needs quotes so YAML doesn't think it's 3.1
          - '3.11'
          - '3.12'
        mode:
          - normal
        include:
          - os: ubuntu-latest
            python: 3.9
            mode: dandi-api
          - os: ubuntu-latest
            python: 3.9
            mode: dev-deps
          - os: ubuntu-latest
            python: 3.12
            mode: dev-deps
          - os: ubuntu-latest
            python: 3.9
            mode: nfs
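    # The matrix above expands to 12 "normal" jobs (3 OSes x 4 Python
    # versions), plus the 4 extra ubuntu-latest jobs added via "include".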
    steps:
      - name: Set up environment
        uses: actions/checkout@v4
        with:
          # Fetch all commits so that versioneer will return something compatible
          # with semantic-version
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip wheel
          pip install ".[extras,test]"
      - name: Install dev versions of select dependencies
        if: matrix.mode == 'dev-deps'
        run: |
          pip install git+https://github.com/jaraco/keyring
          pip install git+https://github.com/NeurodataWithoutBorders/nwbinspector
          pip install git+https://github.com/NeurodataWithoutBorders/pynwb \
                      git+https://github.com/hdmf-dev/hdmf \
                      git+https://github.com/hdmf-dev/hdmf-zarr
      - name: Create NFS filesystem
        if: matrix.mode == 'nfs'
        run: |
          mkdir /tmp/nfsmount_ /tmp/nfsmount
          mkdir /tmp/nfsmount_/tmp /tmp/nfsmount_/home
          echo "/tmp/nfsmount_ localhost(rw)" | sudo tee /etc/exports
          sudo apt-get install -y nfs-kernel-server
          sudo exportfs -a
          sudo mount -t nfs localhost:/tmp/nfsmount_ /tmp/nfsmount
          echo TMPDIR=/tmp/nfsmount/tmp >> "$GITHUB_ENV"
          echo HOME=/tmp/nfsmount/home >> "$GITHUB_ENV"
          echo DANDI_DEVEL_INSTRUMENT_REQUESTS_SUPERLEN=1 >> "$GITHUB_ENV"
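      # The step above exports /tmp/nfsmount_ over NFS, mounts it at
      # /tmp/nfsmount, and points TMPDIR and HOME at the mount, so the test
      # suite runs against an NFS-backed filesystem (with
      # DANDI_DEVEL_INSTRUMENT_REQUESTS_SUPERLEN enabled).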
      - name: Use scheduled test configuration
        if: github.event_name == 'schedule'
        run: echo PYTEST_ADDOPTS=--scheduled >> "$GITHUB_ENV"
      - name: Run all tests
        if: matrix.mode != 'dandi-api'
        run: |
          python -m pytest -s -v --cov=dandi --cov-report=xml dandi
      - name: Smoke test example code in docs
        if: matrix.mode != 'dandi-api' && github.event_name == 'schedule'
        run: |
          set -ex
          cd docs/source/examples
          for f in *.py
          do python "$f"
          done
      - name: Run DANDI API tests only
        if: matrix.mode == 'dandi-api'
        run: |
          export DANDI_TESTS_AUDIT_CSV=/tmp/audit.csv
          python -m pytest -s -v --cov=dandi --cov-report=xml --dandi-api dandi
          if [ ! -e /tmp/audit.csv ]
          then echo Audit file not created
               exit 1
          fi
          lines="$(wc -l /tmp/audit.csv | awk '{print $1}')"
          if [ "$lines" -lt 100 ]
          then echo Audit file shorter than expected - only "$lines" lines
               exit 1
          fi
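      # The shell checks above are a sanity guard: the --dandi-api run is
      # expected to leave behind /tmp/audit.csv with at least 100 lines;
      # otherwise the step fails.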
      - name: Dump Docker Compose logs
        if: failure() && startsWith(matrix.os, 'ubuntu')
        run: |
          docker compose \
            -f dandi/tests/data/dandiarchive-docker/docker-compose.yml \
            logs --timestamps
      - name: Shut down Docker Compose
        if: startsWith(matrix.os, 'ubuntu')
        run: |
          docker compose \
            -f dandi/tests/data/dandiarchive-docker/docker-compose.yml \
            down -v
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./coverage.xml
          flags: unittests
          # name: codecov-umbrella
          # yml: ./codecov.yml
          fail_ci_if_error: false
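
For a rough local reproduction of the default ("normal") test job, the same commands the workflow runs can be used from a checkout of the repository; the dandi-api and nfs modes additionally need the Docker Compose and NFS setup shown above:

python -m pip install --upgrade pip wheel
pip install ".[extras,test]"
python -m pytest -s -v --cov=dandi --cov-report=xml dandi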