Deploy to DockerHub with GitHub Action (#46)
* Update parser.py

* Update config.yml

* Run isort.

* Update.

* Update pyproject.toml

* update

* fix.

* Update get_data.py

* Deploy to DockerHub with GitHub Action.

* Update parser.py

* Apply suggestions from code review

Co-authored-by: Chris Markiewicz <[email protected]>

* Update action versions.

---------

Co-authored-by: Chris Markiewicz <[email protected]>
tsalo and effigies authored Aug 28, 2024
1 parent 60fbfb2 commit 2d6a11d
Showing 12 changed files with 142 additions and 102 deletions.
73 changes: 32 additions & 41 deletions .circleci/config.yml
@@ -5,7 +5,7 @@ orbs:
.dockersetup: &dockersetup
docker:
- image: cimg/python:3.12
working_directory: /src/fmripost_aroma
working_directory: /src/fmripost-aroma

runinstall: &runinstall
name: Install fMRIPost-AROMA
@@ -15,8 +15,8 @@ runinstall: &runinstall
VERSION="$CIRCLE_TAG"
fi
git checkout $CIRCLE_BRANCH
echo "${VERSION}" > /src/fmripost_aroma/src/fmripost_aroma/VERSION
echo "include src/fmripost_aroma/VERSION" >> /src/fmripost_aroma/src/fmripost_aroma/MANIFEST.in
echo "${VERSION}" > /src/fmripost-aroma/src/fmripost_aroma/VERSION
echo "include src/fmripost_aroma/VERSION" >> /src/fmripost-aroma/src/fmripost_aroma/MANIFEST.in
pip install .[tests] --progress-bar off
# Precaching fonts, set 'Agg' as default backend for matplotlib
@@ -48,12 +48,12 @@ jobs:
- run:
name: Download ds005115_raw test data
command: |
cd /src/fmripost_aroma/.circleci
cd /src/fmripost-aroma/.circleci
python get_data.py $PWD/data ds005115_raw
- save_cache:
key: ds005115_raw-01
paths:
- /src/fmripost_aroma/.circleci/data/ds005115_raw
- /src/fmripost-aroma/.circleci/data/ds005115_raw

download_ds005115_resampling:
<<: *dockersetup
@@ -65,12 +65,12 @@ jobs:
- run:
name: Download ds005115_resampling test data
command: |
cd /src/fmripost_aroma/.circleci
cd /src/fmripost-aroma/.circleci
python get_data.py $PWD/data ds005115_resampling
- save_cache:
key: ds005115_resampling-01
paths:
- /src/fmripost_aroma/.circleci/data/ds005115_resampling
- /src/fmripost-aroma/.circleci/data/ds005115_resampling

download_ds005115_deriv_no_mni6:
<<: *dockersetup
@@ -82,12 +82,12 @@ jobs:
- run:
name: Download ds005115_deriv_no_mni6 test data
command: |
cd /src/fmripost_aroma/.circleci
cd /src/fmripost-aroma/.circleci
python get_data.py $PWD/data ds005115_deriv_no_mni6
- save_cache:
key: ds005115_deriv_no_mni6-01
paths:
- /src/fmripost_aroma/.circleci/data/ds005115_deriv_no_mni6
- /src/fmripost-aroma/.circleci/data/ds005115_deriv_no_mni6

download_ds005115_deriv_mni6:
<<: *dockersetup
@@ -99,12 +99,12 @@ jobs:
- run:
name: Download ds005115_deriv_mni6 test data
command: |
cd /src/fmripost_aroma/.circleci
cd /src/fmripost-aroma/.circleci
python get_data.py $PWD/data ds005115_deriv_mni6
- save_cache:
key: ds005115_deriv_mni6-01
paths:
- /src/fmripost_aroma/.circleci/data/ds005115_deriv_mni6
- /src/fmripost-aroma/.circleci/data/ds005115_deriv_mni6

ds005115_deriv_only:
<<: *dockersetup
@@ -117,18 +117,18 @@ jobs:
name: Test the PYAFQ standalone recon workflow
no_output_timeout: 1h
command: |
pytest -rP -o log_cli=true -m "pyafq_recon_full" --cov-config=/src/fmripost_aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost_aroma/.circleci/data --output_dir=/src/fmripost_aroma/.circleci/out --working_dir=/src/fmripost_aroma/.circleci/work fmripost_aroma
pytest -rP -o log_cli=true -m "pyafq_recon_full" --cov-config=/src/fmripost-aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost-aroma/.circleci/data --output_dir=/src/fmripost-aroma/.circleci/out --working_dir=/src/fmripost-aroma/.circleci/work fmripost_aroma
mkdir /src/coverage
mv /src/fmripost_aroma/.coverage /src/coverage/.coverage.pyafq_recon_full
mv /src/fmripost-aroma/.coverage /src/coverage/.coverage.pyafq_recon_full
# remove nifti files before uploading artifacts
find /src/fmripost_aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost_aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
- persist_to_workspace:
root: /src/coverage/
paths:
- .coverage.pyafq_recon_full
- store_artifacts:
path: /src/fmripost_aroma/.circleci/out/pyafq_recon_full/
path: /src/fmripost-aroma/.circleci/out/pyafq_recon_full/

ds005115_deriv_and_raw:
<<: *dockersetup
@@ -141,18 +141,18 @@ jobs:
name: Test the PYAFQ workflow with mrtrix tractography
no_output_timeout: 1h
command: |
pytest -rP -o log_cli=true -m "pyafq_recon_external_trk" --cov-config=/src/fmripost_aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost_aroma/.circleci/data --output_dir=/src/fmripost_aroma/.circleci/out --working_dir=/src/fmripost_aroma/.circleci/work fmripost_aroma
pytest -rP -o log_cli=true -m "pyafq_recon_external_trk" --cov-config=/src/fmripost-aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost-aroma/.circleci/data --output_dir=/src/fmripost-aroma/.circleci/out --working_dir=/src/fmripost-aroma/.circleci/work fmripost_aroma
mkdir /src/coverage
mv /src/fmripost_aroma/.coverage /src/coverage/.coverage.pyafq_recon_external_trk
mv /src/fmripost-aroma/.coverage /src/coverage/.coverage.pyafq_recon_external_trk
# remove nifti files before uploading artifacts
find /src/fmripost_aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost_aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
- persist_to_workspace:
root: /src/coverage/
paths:
- .coverage.pyafq_recon_external_trk
- store_artifacts:
path: /src/fmripost_aroma/.circleci/out/pyafq_recon_external_trk/
path: /src/fmripost-aroma/.circleci/out/pyafq_recon_external_trk/

ds005115_resampling_and_raw:
<<: *dockersetup
@@ -165,18 +165,18 @@ jobs:
name: Test scalar_mapping workflow
no_output_timeout: 1h
command: |
pytest -rP -o log_cli=true -m "scalar_mapper" --cov-config=/src/fmripost_aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost_aroma/.circleci/data --output_dir=/src/fmripost_aroma/.circleci/out --working_dir=/src/fmripost_aroma/.circleci/work fmripost_aroma
pytest -rP -o log_cli=true -m "scalar_mapper" --cov-config=/src/fmripost-aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost-aroma/.circleci/data --output_dir=/src/fmripost-aroma/.circleci/out --working_dir=/src/fmripost-aroma/.circleci/work fmripost_aroma
mkdir /src/coverage
mv /src/fmripost_aroma/.coverage /src/coverage/.coverage.scalar_mapper
mv /src/fmripost-aroma/.coverage /src/coverage/.coverage.scalar_mapper
# remove nifti files before uploading artifacts
find /src/fmripost_aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost_aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
- persist_to_workspace:
root: /src/coverage/
paths:
- .coverage.scalar_mapper
- store_artifacts:
path: /src/fmripost_aroma/.circleci/out/scalar_mapper/
path: /src/fmripost-aroma/.circleci/out/scalar_mapper/

pytests:
<<: *dockersetup
@@ -191,18 +191,18 @@ jobs:
- run:
name: Test the DIPY recon workflows
command: |
pytest -rP -o log_cli=true -m "amico_noddi" --cov-config=/src/fmripost_aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost_aroma/.circleci/data --output_dir=/src/fmripost_aroma/.circleci/out --working_dir=/src/fmripost_aroma/.circleci/work fmripost_aroma
pytest -rP -o log_cli=true -m "amico_noddi" --cov-config=/src/fmripost-aroma/pyproject.toml --cov-append --cov-report term-missing --cov=fmripost_aroma --data_dir=/src/fmripost-aroma/.circleci/data --output_dir=/src/fmripost-aroma/.circleci/out --working_dir=/src/fmripost-aroma/.circleci/work fmripost_aroma
mkdir /src/coverage
mv /src/fmripost_aroma/.coverage /src/coverage/.coverage.amico_noddi
mv /src/fmripost-aroma/.coverage /src/coverage/.coverage.amico_noddi
# remove nifti files before uploading artifacts
find /src/fmripost_aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost_aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.nii.gz" -type f -delete
find /src/fmripost-aroma/.circleci/out/ -name "*.fib.gz" -type f -delete
- persist_to_workspace:
root: /src/coverage/
paths:
- .coverage.amico_noddi
- store_artifacts:
path: /src/fmripost_aroma/.circleci/out/amico_noddi/
path: /src/fmripost-aroma/.circleci/out/amico_noddi/

merge_coverage:
<<: *dockersetup
@@ -234,7 +234,7 @@ jobs:
TZ: "/usr/share/zoneinfo/America/New_York"
docker:
- image: cimg/base:2020.09
working_directory: /tmp/src/fmripost_aroma
working_directory: /tmp/src/fmripost-aroma
steps:
- checkout
- setup_remote_docker:
@@ -379,12 +379,3 @@ workflows:
only: main
tags:
only: /.*/

- build_and_deploy:
requires:
- deployable
filters:
branches:
only: main
tags:
only: /.*/
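
The path changes above all follow from renaming the CircleCI checkout directory from /src/fmripost_aroma to /src/fmripost-aroma; the build_and_deploy job is dropped because image publishing moves to the GitHub Action below. For debugging the renamed checkout outside CI, a minimal local sketch of the runinstall step near the top of this file (the version string is a placeholder; CI derives it from $CIRCLE_TAG):

    # Run from the repository root; mirrors the CircleCI runinstall step
    VERSION="0+local"   # placeholder; CI writes $CIRCLE_TAG on tag builds
    echo "${VERSION}" > src/fmripost_aroma/VERSION
    echo "include src/fmripost_aroma/VERSION" >> src/fmripost_aroma/MANIFEST.in
    pip install .[tests] --progress-bar off
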
1 change: 1 addition & 0 deletions .circleci/get_data.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
"""Download test data."""

import sys

from fmripost_aroma.tests.utils import download_test_data
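
Usage sketch for the download script, matching how the download_* CircleCI jobs above invoke it (the dataset name is one of the cached test datasets listed there):

    # Download a test dataset into ./data, as the CircleCI download jobs do
    cd .circleci
    python get_data.py $PWD/data ds005115_raw
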
46 changes: 46 additions & 0 deletions .github/workflows/docker.yml
@@ -0,0 +1,46 @@
name: Publish Docker image

on:
push:
branches: [main]
release:
types: [published]

jobs:
push_to_registry:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
attestations: write
id-token: write

steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: niprepsbot
password: ${{ secrets.DOCKER_TOKEN }}

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: nipreps/fmripost-aroma

- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: |
nipreps/fmripost-aroma:unstable
${{ github.event_name == 'release' && 'nipreps/fmripost-aroma:latest' || '' }}
${{ github.event_name == 'release' && format('nipreps/fmripost-aroma:{0}', github.event.release.tag_name) || '' }}
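
Once this workflow has run, the published images can be pulled as sketched below. The tags come from the workflow above; the assumption that the image entrypoint is the fMRIPost-AROMA CLI is not part of this diff:

    # Pushed on every merge to main
    docker pull nipreps/fmripost-aroma:unstable

    # Published releases additionally push :latest and a tag named after the release
    docker pull nipreps/fmripost-aroma:latest

    # Assumed entrypoint: the fmripost-aroma command-line interface
    docker run --rm nipreps/fmripost-aroma:unstable --help
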
14 changes: 12 additions & 2 deletions pyproject.toml
@@ -108,13 +108,13 @@ dependencies = [
[tool.hatch.envs.style.scripts]
fix = [
"black src/",
"isort src/",
"ruff check --fix src/",
"isort src/",
]
check = [
"black --check --diff src/",
"isort --check --diff src/",
"ruff check --diff src/",
"isort --check --diff src/",
]

[[tool.hatch.envs.test.matrix]]
@@ -169,6 +169,10 @@ ignore = [
"S311", # We are not using random for cryptographic purposes
"ISC001",
"S603",
"PT023",
"S113",
"S202",
"S602",
]

[tool.ruff.lint.flake8-quotes]
@@ -183,6 +187,12 @@ inline-quotes = "single"
[tool.ruff.format]
quote-style = "single"

[tool.isort]
profile = "black"
multi_line_output = 3
src_paths = ["isort", "test"]
known_local_folder = ["fmripost_aroma"]

[tool.pytest.ini_options]
addopts = '-m "not integration"'
markers = [
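
A quick sketch of exercising the reordered style scripts and the new [tool.isort] section locally; the commands use standard hatch env-script syntax and are not part of this diff:

    # Apply black, ruff --fix, then isort, in the new order
    hatch run style:fix

    # Check-only variant, as CI would run it
    hatch run style:check

    # isort on its own now picks up the black-compatible profile from pyproject.toml
    isort --check --diff src/
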
64 changes: 32 additions & 32 deletions src/fmripost_aroma/cli/parser.py
@@ -152,6 +152,38 @@ def _bids_filter(value, parser):
),
)

g_aroma = parser.add_argument_group('Options for running ICA-AROMA')
g_aroma.add_argument(
'--melodic-dimensionality',
dest='melodic_dim',
action='store',
default=0,
type=int,
help=(
'Exact or maximum number of MELODIC components to estimate '
'(positive = exact, negative = maximum)'
),
)
g_aroma.add_argument(
'--error-on-warnings',
dest='err_on_warn',
action='store_true',
default=False,
help=(
'Raise an error if ICA-AROMA does not produce sensible output '
'(e.g., if all the components are classified as signal or noise)'
),
)
g_aroma.add_argument(
'--denoising-method',
action='store',
nargs='+',
choices=['aggr', 'nonaggr', 'orthaggr'],
default=None,
dest='denoise_method',
help='Denoising method to apply, if any.',
)

g_bids = parser.add_argument_group('Options for filtering BIDS queries')
g_bids.add_argument(
'--skip_bids_validation',
@@ -345,38 +377,6 @@ def _bids_filter(value, parser):
),
)

g_aroma = parser.add_argument_group('Options for running ICA_AROMA')
g_aroma.add_argument(
'--melodic-dimensionality',
dest='melodic_dim',
action='store',
default=0,
type=int,
help=(
'Exact or maximum number of MELODIC components to estimate '
'(positive = exact, negative = maximum)'
),
)
g_aroma.add_argument(
'--error-on-warnings',
dest='err_on_warn',
action='store_true',
default=False,
help=(
'Raise an error if ICA_AROMA does not produce sensible output '
'(e.g., if all the components are classified as signal or noise)'
),
)
g_aroma.add_argument(
'--denoising-method',
action='store',
nargs='+',
choices=['aggr', 'nonaggr', 'orthaggr'],
default=None,
dest='denoise_method',
help='Denoising method to apply, if any.',
)

g_carbon = parser.add_argument_group('Options for carbon usage tracking')
g_carbon.add_argument(
'--track-carbon',
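
The ICA-AROMA argument group itself is unchanged apart from the ICA_AROMA → ICA-AROMA spelling; it only moves earlier in the parser. A hypothetical invocation using those flags (the entry-point name and the positional BIDS-Apps arguments are assumptions, not part of this diff):

    # Assumed entry point and positional arguments; the flags come from the group above
    fmripost-aroma /data/bids /data/derivatives participant \
        --melodic-dimensionality=-200 \
        --denoising-method nonaggr orthaggr \
        --error-on-warnings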