updated specs workflows and added example
AlexPatrie committed Sep 23, 2024 (1 parent 6d60ade, commit c086668)
Showing 8 changed files with 1,936 additions and 36 deletions.
1 change: 1 addition & 0 deletions .github/parse_container_version.py
@@ -3,6 +3,7 @@


service_name = sys.argv[1]

fp = 'docker-compose.yaml'

with open(fp, 'r') as file:
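Only the opening lines of this helper appear in the diff. As a minimal sketch, assuming the script uses PyYAML (which both workflows install) to look up the image tag for the requested service, the full file plausibly reads as follows; the YAML traversal and output format are inferred, not shown in the commit:

# Hypothetical reconstruction of .github/parse_container_version.py,
# assuming the docker-compose.yaml layout used in this repo.
import sys

import yaml  # PyYAML, installed by the workflows below

service_name = sys.argv[1]

fp = 'docker-compose.yaml'

with open(fp, 'r') as file:
    config = yaml.safe_load(file)

# e.g. 'ghcr.io/biosimulators/bio-check-compose_api:1.11.2'
image = config['services'][service_name]['image']

# emit just the tag so the workflow can capture it, e.g. '1.11.2'
print(image.split(':')[-1])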
73 changes: 73 additions & 0 deletions .github/workflows/delivery.yml
@@ -0,0 +1,73 @@
name: Continuous Delivery

on:
release:

jobs:
build-microservices:
runs-on: ubuntu-latest
steps:
# Step 1: Checkout repository
- name: Checkout repository
uses: actions/checkout@v3

# Step 2: Log in to GHCR
- name: Log in to GHCR
run: echo "${{ secrets.REPO_ADMIN_GHCR_TOKEN }}" | docker login ghcr.io -u "${{ secrets.REPO_ADMIN_GH_USERNAME }}" --password-stdin

- name: Install Docker Compose
run: |
sudo curl -L "https://github.com/docker/compose/releases/download/$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
docker-compose --version # Check if Docker Compose is installed correctly
- name: Install Python and PyYAML
run: |
sudo apt-get update
sudo apt-get install -y python3 python3-pip
pip install pyyaml
- name: Extract API container version
id: extract_api_version
run: |
API_VERSION=$(python3 .github/parse_docker_compose.py api -v)
echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV
echo "API Version: $API_VERSION"
API_IMAGE=$(python3 .github/parse_docker_compose.py api -i)
echo "API_IMAGE=$API_IMAGE" >> $GITHUB_ENV
echo "API Image: $API_IMAGE"
- name: Extract worker container version
id: extract_worker_version
run: |
WORKER_VERSION=$(python3 .github/parse_docker_compose.py worker -v)
echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV
echo "WORKER Version: $WORKER_VERSION"
WORKER_IMAGE=$(python3 .github/parse_docker_compose.py worker -i)
echo "WORKER_IMAGE=$WORKER_IMAGE" >> $GITHUB_ENV
echo "WORKER Image: $WORKER_IMAGE"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Build Docker containers with Docker Compose
run: |
API_TAG=${{ env.API_IMAGE }}
WORKER_TAG=${{ env.WORKER_IMAGE }}
docker-compose build --no-cache
echo $API_TAG > api_tag.txt
echo $WORKER_TAG > worker_tag.txt
- name: Upload image tag artifact
uses: actions/upload-artifact@v3
with:
name: api_tag
path: api_tag.txt

- name: Upload image tag artifact
uses: actions/upload-artifact@v3
with:
name: worker_tag
path: worker_tag.txt
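
Note that this job calls .github/parse_docker_compose.py with -v/-i flags, a helper that is not among the files changed in this commit (the deployment workflow below calls parse_container_version.py instead). A hedged sketch of the interface those call sites imply; the file's actual contents, flags, and traversal are all assumptions:

# Hypothetical sketch of .github/parse_docker_compose.py, inferred from
# the -v/-i call sites above; this file is not part of the commit.
import argparse

import yaml

parser = argparse.ArgumentParser()
parser.add_argument('service_name', help="service key in docker-compose.yaml, e.g. 'api'")
parser.add_argument('-v', '--version', action='store_true', help='print only the image tag')
parser.add_argument('-i', '--image', action='store_true', help='print the full image reference')
args = parser.parse_args()

with open('docker-compose.yaml', 'r') as file:
    config = yaml.safe_load(file)

image = config['services'][args.service_name]['image']

if args.image:
    print(image)  # e.g. ghcr.io/biosimulators/bio-check-compose_api:1.11.2
else:
    print(image.split(':')[-1])  # e.g. 1.11.2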


79 changes: 79 additions & 0 deletions .github/workflows/deployment.yml
@@ -0,0 +1,79 @@
name: Continuous Deployment

on:
workflow_dispatch:

jobs:
deploy-microservices:
runs-on: ubuntu-latest
steps:
# Step 1: Checkout repository
- name: Checkout repository
uses: actions/checkout@v3

# Step 2: Log in to GHCR
- name: Log in to GHCR
run: echo "${{ secrets.REPO_ADMIN_GHCR_TOKEN }}" | docker login ghcr.io -u "${{ secrets.REPO_ADMIN_GH_USERNAME }}" --password-stdin

# Step 3: Install Docker Compose
- name: Install Docker Compose
run: |
sudo curl -L "https://github.com/docker/compose/releases/download/$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
docker-compose --version # Check if Docker Compose is installed correctly
- name: Install Python and PyYAML
run: |
sudo apt-get update
sudo apt-get install -y python3 python3-pip
pip install pyyaml
# Step 5: Extract API container version
- name: Extract API container version
id: extract_api_version
run: |
API_VERSION=$(python3 .github/parse_container_version.py api)
echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV
echo "API Version: $API_VERSION"
# Step 6: Extract worker container version
- name: Extract worker container version
id: extract_worker_version
run: |
WORKER_VERSION=$(python3 .github/parse_container_version.py worker)
echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV
echo "WORKER Version: $WORKER_VERSION"
# Step 7: Debug output of extracted versions
- name: Debug output of extracted versions
run: |
echo "API_VERSION=${{ env.API_VERSION }}"
echo "WORKER_VERSION=${{ env.WORKER_VERSION }}"
# Step 8: Set up Docker Buildx
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

# Step 9: Build Docker containers with Docker Compose
- name: Build Docker containers with Docker Compose
run: docker-compose build --no-cache

# Step 10: Change permissions for push script
- name: Change push script permissions
run: chmod +x ./assets/scripts/push_image.sh

# Step 11: Deploy API microservice container to GHCR
- name: Deploy API microservice container to GHCR
run: |
./assets/scripts/push_image.sh compose_api ${{ env.API_VERSION }}
env:
REPO_ADMIN_GH_USERNAME: ${{ secrets.REPO_ADMIN_GH_USERNAME }}
REPO_ADMIN_GHCR_TOKEN: ${{ secrets.REPO_ADMIN_GHCR_TOKEN }}

# Step 12: Deploy Worker microservice container to GHCR
- name: Deploy worker microservice container to GHCR
run: |
./assets/scripts/push_image.sh compose_worker ${{ env.WORKER_VERSION }}
env:
REPO_ADMIN_GH_USERNAME: ${{ secrets.REPO_ADMIN_GH_USERNAME }}
REPO_ADMIN_GHCR_TOKEN: ${{ secrets.REPO_ADMIN_GHCR_TOKEN }}
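
assets/scripts/push_image.sh itself is not part of this commit, and the real script is shell. Purely as an illustration of the login-and-push flow it presumably wraps, here is a Python equivalent, using the image naming pattern from docker-compose.yaml and the env vars the workflow step passes in; every name below is an assumption:

# Hypothetical Python equivalent of assets/scripts/push_image.sh.
# Invocation mirrors the workflow: push_image.py <service> <version>
import os
import subprocess
import sys

service, version = sys.argv[1], sys.argv[2]  # e.g. compose_api, 1.11.2

# naming pattern taken from docker-compose.yaml
image = f'ghcr.io/biosimulators/bio-check-{service}:{version}'

# credentials arrive via the env: block on the workflow step
username = os.environ['REPO_ADMIN_GH_USERNAME']
token = os.environ['REPO_ADMIN_GHCR_TOKEN']

# authenticate against GHCR non-interactively
subprocess.run(
    ['docker', 'login', 'ghcr.io', '-u', username, '--password-stdin'],
    input=token.encode(), check=True)

# docker-compose build already produced the image locally under this
# name, so a plain push is sufficient
subprocess.run(['docker', 'push', image], check=True)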
61 changes: 41 additions & 20 deletions compose_api/main.py
@@ -34,16 +34,20 @@

APP_TITLE = "bio-compose"
APP_VERSION = "0.1.0"
-APP_SERVERS = [
-    {
-        "url": "https://biochecknet.biosimulations.org",
-        "description": "Production server"
-    },
-    {
-        "url": "http://localhost:8000",
-        "description": "Development server"
-    }
-]
+# APP_SERVERS = [
+#     {
+#         "url": "https://biochecknet.biosimulations.org",
+#         "description": "Production server"
+#     },
+#     {
+#         "url": "http://localhost:3001",
+#         "description": "Main Development server"
+#     },
+#     {
+#         "url": "http://localhost:8000",
+#         "description": "Alternate Development server"
+#     }
+# ]

APP_ORIGINS = [
'http://127.0.0.1:8000',
@@ -71,7 +75,7 @@
# -- app components -- #

router = APIRouter()
-app = FastAPI(title=APP_TITLE, version=APP_VERSION, servers=APP_SERVERS)
+app = FastAPI(title=APP_TITLE, version=APP_VERSION) # , servers=APP_SERVERS)

# add origins
app.add_middleware(
@@ -82,7 +86,7 @@
allow_headers=["*"])

# add servers
-app.servers = APP_SERVERS
+# app.servers = APP_SERVERS


# -- mongo db -- #
@@ -284,12 +288,15 @@ async def verify_sbml(
simulators: List[str] = Query(default=["amici", "copasi", "tellurium"], description="List of simulators to compare"),
include_outputs: bool = Query(default=True, description="Whether to include the output data on which the comparison is based."),
comparison_id: Optional[str] = Query(default=None, description="Descriptive prefix to be added to this submission's job ID."),
-    expected_results: UploadFile = File(default=None, description="reports.h5 file defining the expected results to be included in the comparison."),
+    # expected_results: Optional[UploadFile] = File(default=None, description="reports.h5 file defining the expected results to be included in the comparison."),
rTol: Optional[float] = Query(default=None, description="Relative tolerance to use for proximity comparison."),
aTol: Optional[float] = Query(default=None, description="Absolute tolerance to use for proximity comparison."),
-    selection_list: Optional[List[str]] = Query(default=None, description="List of observables to include in the return data."),
+    selection_list: Optional[List[str]] = Query(default=None, description="List of observables to include in the return data.")
) -> PendingSbmlVerificationJob:
try:
+        expected_results = None
+        if isinstance(expected_results, str) and expected_results.strip() == "":
+            expected_results = None
# request specific params
if comparison_id is None:
compare_id = "utc_comparison_sbml"
@@ -313,13 +320,15 @@
# Save uploaded reports file to Google Cloud Storage if applicable
report_fp = None
report_blob_dest = None
-        if expected_results:
+        if expected_results is not None:
# handle incorrect files upload
properly_formatted_report = check_upload_file_extension(expected_results, 'expected_results', '.h5')
if properly_formatted_report:
report_fp = await save_uploaded_file(expected_results, save_dest)
report_blob_dest = upload_prefix + report_fp.split("/")[-1]
upload_blob(bucket_name=BUCKET_NAME, source_file_name=report_fp, destination_blob_name=report_blob_dest)
+            else:
+                report_blob_dest = None
report_location = report_blob_dest

pending_job_doc = await db_connector.insert_job_async(
@@ -343,7 +352,8 @@
# clean up local temp files
os.remove(fp)

-        return PendingSbmlVerificationJob(**pending_job_doc)
+        # return PendingSbmlVerificationJob(**pending_job_doc)
+        return pending_job_doc
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))

@@ -582,18 +592,25 @@ async def fetch_results(job_id: str):

# state-case: job is completed
job = await db_connector.read(collection_name="completed_jobs", job_id=job_id)
+    print('COMPLETED GOT JOB: ', job)
if job is not None:
job.pop('_id', None)
return {'content': job}

# state-case: job has failed
if job is None:
job = await db_connector.read(collection_name="failed_jobs", job_id=job_id)
+        print('FAILED GOT JOB: ', job)

# state-case: job is not in completed:
if job is None:
job = await db_connector.read(collection_name="in_progress_jobs", job_id=job_id)
+        print('IN_PROGRESS GOT JOB: ', job)

# state-case: job is not in progress:
if job is None:
job = await db_connector.read(collection_name="pending_jobs", job_id=job_id)
+        print('GOT JOB: ', job)

# return-case: job exists as either completed, failed, in_progress, or pending
if not isinstance(job, type(None)):
@@ -615,7 +632,8 @@ async def fetch_results(job_id: str):
remote_fp = None

if isinstance(job_data, list):
-        return OutputData(content=job)
+        # return OutputData(content=job)
+        return {'content': job}

# output-type-case: output is saved as a dict
if isinstance(job_data, dict):
@@ -624,7 +642,8 @@
remote_fp = job_data['results_file']
# status/output-case: job is complete and output content is raw data and so return the full data TODO: do something better here
else:
-            return OutputData(content=job)
+            # return OutputData(content=job)
+            return {'content': job}

# output-type-case: output is saved as flattened (str) and thus also a file download
elif isinstance(job_data, str):
@@ -635,15 +654,17 @@
temp_dest = mkdtemp()
local_fp = download_file_from_bucket(source_blob_path=remote_fp, out_dir=temp_dest, bucket_name=BUCKET_NAME)

-        return FileResponse(path=local_fp, media_type="application/octet-stream", filename=local_fp.split("/")[-1])
+        # return FileResponse(path=local_fp, media_type="application/octet-stream", filename=local_fp.split("/")[-1])
+        return {'path': local_fp, 'media_type': 'application/octet-stream', 'filename': local_fp.split('/')[-1]}

# status/content-case: job is either pending or in progress and does not contain files to download
else:
# acknowledge the user submission to differentiate between original submission
status = job['status']
job['status'] = 'SUBMITTED:' + status

-            return OutputData(content=job)
+            # return OutputData(content=job)
+            return {'content': job}

# return-case: no job exists in any collection by that id
else:
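The expected_results handling in verify_sbml above works around a common FastAPI quirk: when a client submits an optional file field as an empty form string, the parameter arrives as "" rather than None. A reusable version of that guard, as a sketch (the helper name is mine, not the repo's):

from typing import Optional, Union

from fastapi import UploadFile


def normalize_upload(value: Union[UploadFile, str, None]) -> Optional[UploadFile]:
    """Treat an empty-string form field as 'no file uploaded'."""
    if isinstance(value, str) and value.strip() == "":
        return None
    return value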
4 changes: 2 additions & 2 deletions compose_api/openapi_spec.py
@@ -13,8 +13,8 @@ def main():
version=app.version,
openapi_version=app.openapi_version,
description=app.description,
-        routes=app.routes,
-        servers=app.servers
+        routes=app.routes
+        # servers=app.servers
)

# Convert the JSON OpenAPI spec to YAML
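For context, the surrounding main() (largely collapsed in this diff) builds the spec from the FastAPI app and dumps it to YAML. A sketch of that flow after this commit, with the output path inferred from the openapi_3_1_0_generated.yaml file changed below; the dump details are assumptions:

# Hypothetical sketch of compose_api/openapi_spec.py after this commit.
import yaml
from fastapi.openapi.utils import get_openapi

from main import app  # the FastAPI app from compose_api/main.py


def main():
    openapi_spec = get_openapi(
        title=app.title,
        version=app.version,
        openapi_version=app.openapi_version,
        description=app.description,
        routes=app.routes
        # servers=app.servers  (disabled in this commit along with APP_SERVERS)
    )

    # convert the JSON spec to YAML, e.g. spec/openapi_3_1_0_generated.yaml
    filename = f"spec/openapi_{app.openapi_version.replace('.', '_')}_generated.yaml"
    with open(filename, 'w') as f:
        yaml.dump(openapi_spec, f, sort_keys=False)


if __name__ == '__main__':
    main()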
11 changes: 0 additions & 11 deletions compose_api/spec/openapi_3_1_0_generated.yaml
@@ -2,11 +2,6 @@ openapi: 3.1.0
info:
title: bio-compose
version: 0.1.0
-servers:
-- url: https://biochecknet.biosimulations.org
-  description: Production server
-- url: http://localhost:8000
-  description: Development server
paths:
/:
get:
@@ -632,12 +627,6 @@ components:
format: binary
title: Uploaded File
description: A deterministic SBML model.
-        expected_results:
-          type: string
-          format: binary
-          title: Expected Results
-          description: reports.h5 file defining the expected results to be included
-            in the comparison.
type: object
required:
- uploaded_file
6 changes: 3 additions & 3 deletions docker-compose.yaml
@@ -1,4 +1,4 @@
-version: '3.8'
+# version: '3.8'

services:
mongodb:
@@ -15,7 +15,7 @@ services:
build:
context: ./compose_api
dockerfile: ./Dockerfile-compose_api
-    image: ghcr.io/biosimulators/bio-check-compose_api:1.11.1
+    image: ghcr.io/biosimulators/bio-check-compose_api:1.11.2 # .1 CURRENT
container_name: api
environment:
- MONGO_URI=mongodb://mongodb:27017/?retryWrites=true&w=majority&appName=bio-compose
@@ -34,7 +34,7 @@ services:
context: ./compose_worker
dockerfile: ./Dockerfile-compose_worker

-    image: ghcr.io/biosimulators/bio-check-compose_worker:1.11.1
+    image: ghcr.io/biosimulators/bio-check-compose_worker:1.11.2
container_name: worker
environment:
- MONGO_URI=mongodb://mongodb:27017/?retryWrites=true&w=majority&appName=bio-compose
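A quick end-to-end sanity check tying the version bump to the helper: after this commit, parsing the api service should yield the new tag. This assumes a helper along the lines of the parse_container_version.py sketch shown earlier and an api service key in docker-compose.yaml:

# hypothetical local check of the bumped tag
import subprocess

out = subprocess.run(
    ['python3', '.github/parse_container_version.py', 'api'],
    capture_output=True, text=True, check=True)
assert out.stdout.strip() == '1.11.2'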