Refactor system tests to use the new FTP server and to run in pytest (#301)

* Remove the old FTP server that was giving us problems

* Reconfigure the new FTP server so it can be accessed from inside a container

* Fix hanging problem in new FTP server

* Convert system test shell script into pytest tests

* Set host.docker.internal for the sake of docker on Linux

* Dump docker logs in GHA on failure

* Add a subprocess wrapper to capture full output

* Install Python 3.9.13 on the orthanc-anon image to avoid an ftplib bug that prevented us from connecting to the test FTP server on the docker host

* Add docker build caching for orthanc-anon

* Abstract out the "wait for condition for N seconds" pattern (a sketch of such a helper follows the file summary below)

---------

Co-authored-by: Stef Piatek <[email protected]>
jeremyestein and stefpiatek authored Feb 26, 2024
1 parent a66dd56 commit 2db6331
Showing 32 changed files with 619 additions and 515 deletions.
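
One of the commit bullets above abstracts out the "wait for condition for N seconds" pattern into a shared test helper. That helper is not among the diffs excerpted below, so the following is only a minimal sketch of such a utility; the name `wait_until`, its signature and the polling strategy are illustrative assumptions rather than the actual pytest-pixl code.

```python
import time
from typing import Callable


def wait_until(condition: Callable[[], bool], seconds_max: float = 30, poll_interval: float = 1) -> None:
    """Poll `condition` until it returns True, failing after `seconds_max` seconds."""
    deadline = time.monotonic() + seconds_max
    while time.monotonic() < deadline:
        if condition():
            return
        time.sleep(poll_interval)
    msg = f"Condition was not met within {seconds_max} seconds"
    raise TimeoutError(msg)
```

A system test would then call something like `wait_until(lambda: export_file.exists(), seconds_max=121)` instead of hand-rolling a sleep loop.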
67 changes: 31 additions & 36 deletions .github/workflows/main.yml
@@ -143,30 +143,6 @@ jobs:
timeout-minutes: 30
steps:
- uses: actions/checkout@v3

- uses: docker/setup-buildx-action@v3
# pre-build and cache the postgres container as installing python3 takes a while, doesn't push
- name: Cache Docker layers
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- uses: docker/build-push-action@v5
with:
context: .
file: docker/postgres/Dockerfile
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
- # Temp fix
# https://github.com/docker/build-push-action/issues/252
# https://github.com/moby/buildkit/issues/1896
name: Move cache
run: |
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
- name: Init Python
uses: actions/setup-python@v4
with:
@@ -175,7 +151,7 @@ jobs:

- name: Install Python dependencies
run: |
pip install pixl_core/ pixl_ehr/[test]
pip install pytest-pixl/ pixl_core/ pixl_ehr/[test]
- name: Run tests
working-directory: pixl_ehr/tests
@@ -187,7 +163,6 @@
timeout-minutes: 10
steps:
- uses: actions/checkout@v3

- name: Init Python
uses: actions/setup-python@v4
with:
@@ -209,25 +184,21 @@
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Prune docker volumes
# seems like we're getting an error from ftp-server exiting with zero status code
# this is a workaround that resolves it locally 🤞
run: |
docker volume prune --force
- uses: docker/setup-buildx-action@v3
# pre-build and cache the postgres container as installing python3 takes a while, doesn't push
# pre-build and cache the orthanc-anon image: installing python3 takes a while, doesn't push
- name: Cache Docker layers
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
key: ${{ runner.os }}-buildx-${{ github.ref }}-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-${{ github.ref }}-
${{ runner.os }}-buildx-
save-always: true
- uses: docker/build-push-action@v5
with:
context: .
file: docker/postgres/Dockerfile
file: docker/orthanc-anon/Dockerfile
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
- # Temp fix
@@ -244,6 +215,14 @@
python-version: 3.10.6
cache: "pip"

- name: Install Python dependencies
# The -e option is required here due to the way the
# code determines the export directory. See issue #318.
run: |
pip install -e pixl_core/
pip install -e cli/[test]
pip install -e pytest-pixl/
- name: Build test services
working-directory: test
run: |
@@ -257,3 +236,19 @@
working-directory: test
run: |
./run-system-test.sh
echo FINISHED SYSTEM TEST SCRIPT
- name: Dump ehr-api docker logs for debugging
if: ${{ failure() }}
run: |
docker logs -t system-test-ehr-api-1 2>&1
- name: Dump orthanc-anon docker logs for debugging
if: ${{ failure() }}
run: |
docker logs -t system-test-orthanc-anon-1 2>&1
- name: Dump imaging-api docker logs for debugging
if: ${{ failure() }}
run: |
docker logs -t system-test-imaging-api-1 2>&1
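
The "Install Python dependencies" step above notes that the `-e` (editable) flag is required because of how the code determines the export directory (issue #318). That logic is not part of this diff; one plausible illustration of why an editable install matters, assuming the export directory is derived from the package's own file location (the names below are made up), is:

```python
from pathlib import Path

# With `pip install -e`, __file__ lives in the source checkout, so walking up the
# tree lands in the repository, where ./exports is created and mounted.
# With a regular install, __file__ lives in site-packages and the same walk
# resolves to a directory that has nothing to do with the checkout.
PROJECT_ROOT = Path(__file__).resolve().parents[1]  # hypothetical
EXPORT_ROOT = PROJECT_ROOT / "exports"              # hypothetical
```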
5 changes: 4 additions & 1 deletion cli/src/pixl_cli/main.py
@@ -125,7 +125,10 @@ def extract_radiology_reports(parquet_dir: Path) -> None:

success_code = 200
if response.status_code != success_code:
msg = f"Failed to run export-patient-data due to: {response.text}"
msg = (
f"Failed to run export-patient-data due to: "
f"error code {response.status_code} {response.text}"
)
raise RuntimeError(msg)


7 changes: 7 additions & 0 deletions docker-compose.yml
@@ -153,6 +153,9 @@ services:
- ${PWD}/orthanc/orthanc-anon/config:/run/secrets:ro
networks:
- pixl-net
# needed for same reason as ehr-api
extra_hosts:
- "host.docker.internal:host-gateway"
depends_on:
postgres:
condition: service_healthy
@@ -253,6 +256,10 @@ services:
retries: 5
networks:
- pixl-net
# needed for testing under GHA (linux), so this container
# can reach the test FTP server running on the docker host
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
- ${PWD}/exports:/run/exports

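
The two `extra_hosts` entries above make `host.docker.internal` resolve to the Docker host even on Linux, where (unlike Docker Desktop) the name is not provided automatically. A quick way to confirm the mapping from inside one of these containers might be something like the following; the port number is a placeholder, not the test FTP server's actual port.

```python
import socket

FTP_CONTROL_PORT = 20021  # placeholder: whatever port the test FTPS server binds on the host

# Resolves via the extra_hosts entry and opens a TCP connection to the docker host
with socket.create_connection(("host.docker.internal", FTP_CONTROL_PORT), timeout=5):
    print("test FTP server on the docker host is reachable")
```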
33 changes: 33 additions & 0 deletions docker/orthanc-anon/Dockerfile
@@ -13,6 +13,39 @@
# limitations under the License.
FROM osimis/orthanc:22.9.0-full-stable
SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]
# need some packages for building python 3.9.13
RUN export DEBIAN_FRONTEND=noninteractive \
&& apt update \
&& apt install --yes --no-install-recommends \
build-essential \
libbz2-dev \
libffi-dev \
libgdbm-dev \
libncurses5-dev \
libnss3-dev \
libreadline-dev \
libsqlite3-dev \
libssl-dev \
zlib1g-dev \
&& apt-get autoremove \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# Default for Debian Bullseye (11) is python 3.9.2, which has a bug in ftplib that is
# affecting us.
# See https://bugs.python.org/issue43285 for more details. It only happens in the system
# test, when docker is using NAT to implement host.docker.internal
# Install 3.9.13 alongside the existing version (removing the original python beforehand
# seems to break orthanc)
ADD https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz /python3913/
RUN cd /python3913 \
&& tar -xvf Python-3.9.13.tgz \
&& cd Python-3.9.13 \
&& ./configure --enable-optimizations \
&& make -j `nproc` \
&& make install \
&& make clean \
&& rm -fr /python3913

# Install requirements before copying modules
COPY ./pixl_core/pyproject.toml ./pixl_core/pyproject.toml
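
The Dockerfile comments above point at https://bugs.python.org/issue43285. In short: older ftplib releases (including Debian Bullseye's 3.9.2) open the passive-mode data connection to whatever IP address the server advertises in its PASV reply, which is unreachable when the server sits behind the NAT that implements `host.docker.internal`; later point releases reuse the control-connection host instead, which is why Python 3.9.13 is built here. For reference only, a client-side workaround on an older interpreter could look roughly like this (a sketch; it is not what this commit does):

```python
import ftplib


class PatchedFTPTLS(ftplib.FTP_TLS):
    """Ignore the host advertised in the PASV reply and reuse the control-connection
    host, which is roughly what newer Python point releases do by default."""

    def makepasv(self):
        _, port = super().makepasv()
        return self.host, port
```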
3 changes: 3 additions & 0 deletions orthanc/orthanc-anon/plugin/pixl.py
@@ -72,6 +72,9 @@ def AzureAccessToken():
return response.json()["access_token"]


TIMER = None


def AzureDICOMTokenRefresh():
"""
Refresh Azure DICOM token
4 changes: 3 additions & 1 deletion pixl_core/src/core/exports.py
@@ -80,9 +80,10 @@ def copy_to_exports(self, input_omop_dir: pathlib.Path) -> str:
"""
public_input = input_omop_dir / "public"

logging.info("Copying public parquet files from %s to %s", public_input, self.public_output)

# Make directory for exports if they don't exist
ParquetExport._mkdir(self.public_output)
logger.info("Copying public parquet files from %s to %s", public_input, self.public_output)

# Copy extract files, overwriting if it exists
shutil.copytree(public_input, self.public_output, dirs_exist_ok=True)
@@ -96,6 +97,7 @@ def export_radiology(self, export_df: pd.DataFrame) -> pathlib.Path:
"""Export radiology reports to parquet file"""
self._mkdir(self.radiology_output)
parquet_file = self.radiology_output / "radiology.parquet"
logging.info("Exporting radiology to %s", self.radiology_output)
export_df.to_parquet(parquet_file)
# We are not responsible for making the "latest" symlink, see `copy_to_exports`.
# This avoids the confusion caused by EHR API (which calls export_radiology) having a
7 changes: 4 additions & 3 deletions pixl_core/src/core/upload.py
@@ -76,10 +76,11 @@ def upload_dicom_image(zip_content: BinaryIO, pseudo_anon_id: str) -> None:

# Store the file using a binary handler
try:
logger.info("Running command %s", command)
ftp.storbinary(command, zip_content)
except ftplib.all_errors as ftp_error:
ftp.quit()
error_msg = "Failed to run STOR command '%s': '%s'"
error_msg = f"Failed to run STOR command : {command}"
raise ConnectionError(error_msg, command, ftp_error) from ftp_error

# Close the FTP connection
@@ -159,8 +160,8 @@ def _connect_to_ftp() -> FTP_TLS:
ftp.login(ftp_user, ftp_password)
ftp.prot_p()
except ftplib.all_errors as ftp_error:
error_msg = "Failed to connect to FTPS server: '%s'"
raise ConnectionError(error_msg, ftp_error) from ftp_error
error_msg = f"Failed to connect to FTPS server: {ftp_user}@{ftp_host}:{ftp_port}"
raise ConnectionError(error_msg) from ftp_error
return ftp


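
For context, the overall flow that `upload.py` wraps is plain stdlib FTPS: connect, log in, switch the data channel to TLS, then `STOR` the zip. The sketch below is a simplified outline of that flow, not the module's exact code; host, port and credential handling are elided.

```python
import ftplib
from typing import BinaryIO


def upload_via_ftps(host: str, port: int, user: str, password: str,
                    remote_name: str, content: BinaryIO) -> None:
    """Upload a file over explicit-TLS FTP using only the standard library."""
    ftp = ftplib.FTP_TLS()
    ftp.connect(host, port)
    ftp.login(user, password)
    ftp.prot_p()  # encrypt the data channel as well as the control channel
    try:
        ftp.storbinary(f"STOR {remote_name}", content)
    finally:
        ftp.quit()
```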
35 changes: 19 additions & 16 deletions pixl_core/tests/conftest.py
@@ -16,17 +16,20 @@
import datetime
import os
import pathlib
import subprocess
import shlex
from pathlib import Path
from typing import TYPE_CHECKING, BinaryIO

import pytest
from core.db.models import Base, Extract, Image
from core.exports import ParquetExport
from pytest_pixl.helpers import run_subprocess
from pytest_pixl.plugin import FtpHostAddress
from sqlalchemy import Engine, create_engine
from sqlalchemy.orm import Session, sessionmaker

if TYPE_CHECKING:
import subprocess
from collections.abc import Generator

pytest_plugins = "pytest_pixl"
@@ -47,25 +50,19 @@
@pytest.fixture(scope="package")
def run_containers() -> subprocess.CompletedProcess[bytes]:
"""Run docker containers for tests which require them."""
subprocess.run(
b"docker compose down --volumes",
check=True,
cwd=TEST_DIR,
shell=True, # noqa: S602
run_subprocess(
shlex.split("docker compose down --volumes"),
TEST_DIR,
timeout=60,
)
yield subprocess.run(
b"docker compose up --build --wait",
check=True,
cwd=TEST_DIR,
shell=True, # noqa: S602
yield run_subprocess(
shlex.split("docker compose up --build --wait"),
TEST_DIR,
timeout=60,
)
subprocess.run(
b"docker compose down --volumes",
check=True,
cwd=TEST_DIR,
shell=True, # noqa: S602
run_subprocess(
shlex.split("docker compose down --volumes"),
TEST_DIR,
timeout=60,
)

@@ -79,6 +76,12 @@ def ftps_home_dir(ftps_server) -> Generator[Path, None, None]:
return Path(ftps_server.home_dir)


@pytest.fixture(scope="session")
def ftp_host_address():
"""Run FTP on localhost - no docker containers need to access it"""
return FtpHostAddress.LOCALHOST


@pytest.fixture()
def test_zip_content() -> BinaryIO:
"""Directory containing the test data for uploading to the ftp server."""
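
`conftest.py` now drives docker compose through `run_subprocess` from `pytest_pixl.helpers`, the "subprocess wrapper to capture full output" mentioned in the commit message. Its implementation is not part of this excerpt; a plausible sketch, with the signature and logging behaviour assumed rather than taken from the real helper, is:

```python
from __future__ import annotations

import logging
import subprocess
from pathlib import Path


def run_subprocess(
    cmd: list[str], working_dir: Path | None = None, *, timeout: int = 360
) -> subprocess.CompletedProcess[bytes]:
    """Run a command, capturing stdout/stderr and logging them in full on failure."""
    logging.info("Running command %s", cmd)
    try:
        completed = subprocess.run(  # noqa: S603 command comes from trusted test code
            cmd, check=True, cwd=working_dir, timeout=timeout, capture_output=True
        )
    except subprocess.CalledProcessError as error:
        logging.error("*** stdout ***\n%s", error.stdout.decode())
        logging.error("*** stderr ***\n%s", error.stderr.decode())
        raise
    return completed
```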
8 changes: 3 additions & 5 deletions pixl_dcmd/tests/test_main.py
@@ -14,7 +14,6 @@
from __future__ import annotations

import pathlib
import subprocess

import nibabel
import numpy as np
@@ -23,6 +22,7 @@
import sqlalchemy
import yaml
from pydicom.data import get_testdata_file
from pytest_pixl.helpers import run_subprocess

from core.db.models import Image
from pixl_dcmd.main import (
@@ -109,15 +109,14 @@ def test_can_nifti_convert_post_anonymisation(
# Convert the anonymised DICOMs to NIFTI with dcm2niix
anon_nifti_dir = tmp_path / "nifti_anon"
anon_nifti_dir.mkdir()
subprocess.run(
run_subprocess(
["dcm2niix", "-f", "anon", "-o", str(anon_nifti_dir), str(anon_dicom_dir)],
check=True,
)

# Convert the pre-anonymisation DICOMs to NIFTI with dcm2niix
ident_nifti_dir = tmp_path / "nifti_ident"
ident_nifti_dir.mkdir()
subprocess.run(
run_subprocess(
[
"dcm2niix",
"-f",
@@ -126,7 +125,6 @@
str(ident_nifti_dir),
str(directory_of_mri_dicoms),
],
check=True,
)

# Confirm that the shape, orientation and contents of the pre- and
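
The end of `test_can_nifti_convert_post_anonymisation` (elided above) confirms that the shape, orientation and contents of the pre- and post-anonymisation conversions match. Roughly, such a comparison looks like the following; it reuses the `anon_nifti_dir`/`ident_nifti_dir` directories defined earlier in the test, and the output file names are assumptions rather than copied from the test.

```python
import nibabel
import numpy as np

# Load the two dcm2niix outputs; the exact file names depend on the -f arguments
anon_nifti = nibabel.load(anon_nifti_dir / "anon.nii")    # name assumed
ident_nifti = nibabel.load(ident_nifti_dir / "ident.nii")  # name assumed

assert anon_nifti.shape == ident_nifti.shape                        # same shape
np.testing.assert_allclose(anon_nifti.affine, ident_nifti.affine)   # same orientation
np.testing.assert_array_equal(                                      # same voxel data
    anon_nifti.get_fdata(), ident_nifti.get_fdata()
)
```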
1 change: 1 addition & 0 deletions pixl_ehr/pyproject.toml
@@ -25,6 +25,7 @@ dependencies = [
test = [
"pytest==7.4.2",
"pytest-asyncio==0.21.1",
"pytest-pixl",
"httpx==0.24.*",
]
dev = [