From c0866683e5b9d8eb802376a1db1bff01fddb171a Mon Sep 17 00:00:00 2001 From: "spaceBearAmadeus (Alex)" Date: Mon, 23 Sep 2024 11:14:06 -0400 Subject: [PATCH] updated specs workflows and added example --- .github/parse_container_version.py | 1 + .github/workflows/delivery.yml | 73 + .github/workflows/deployment.yml | 79 + compose_api/main.py | 61 +- compose_api/openapi_spec.py | 4 +- compose_api/spec/openapi_3_1_0_generated.yaml | 11 - docker-compose.yaml | 6 +- model-examples/sbml-core/BorisEJB.xml | 1737 +++++++++++++++++ 8 files changed, 1936 insertions(+), 36 deletions(-) create mode 100644 .github/workflows/delivery.yml create mode 100644 .github/workflows/deployment.yml create mode 100644 model-examples/sbml-core/BorisEJB.xml diff --git a/.github/parse_container_version.py b/.github/parse_container_version.py index 8902da775..bba91113b 100644 --- a/.github/parse_container_version.py +++ b/.github/parse_container_version.py @@ -3,6 +3,7 @@ service_name = sys.argv[1] + fp = 'docker-compose.yaml' with open(fp, 'r') as file: diff --git a/.github/workflows/delivery.yml b/.github/workflows/delivery.yml new file mode 100644 index 000000000..95d447311 --- /dev/null +++ b/.github/workflows/delivery.yml @@ -0,0 +1,73 @@ +name: Continuous Delivery + +on: + release: + +jobs: + build-microservices: + runs-on: ubuntu-latest + steps: + # Step 1: Checkout repository + - name: Checkout repository + uses: actions/checkout@v3 + + # Step 2: Log in to GHCR + - name: Log in to GHCR + run: echo "${{ secrets.REPO_ADMIN_GHCR_TOKEN }}" | docker login ghcr.io -u "${{ secrets.REPO_ADMIN_GH_USERNAME }}" --password-stdin + + - name: Install Docker Compose + run: | + sudo curl -L "https://github.com/docker/compose/releases/download/$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + docker-compose --version # Check if Docker Compose is installed correctly + + - name: Install Python and PyYAML + run: | + sudo apt-get update + sudo apt-get install -y python3 python3-pip + pip install pyyaml + + - name: Extract API container version + id: extract_api_version + run: | + API_VERSION=$(python3 .github/parse_docker_compose.py api -v) + echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV + echo "API Version: $API_VERSION" + API_IMAGE=$(python3 .github/parse_docker_compose.py api -i) + echo "API_IMAGE=$API_IMAGE" >> $GITHUB_ENV + echo "API Image: $API_IMAGE" + + - name: Extract worker container version + id: extract_worker_version + run: | + WORKER_VERSION=$(python3 .github/parse_docker_compose.py worker -v) + echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV + echo "WORKER Version: $WORKER_VERSION" + WORKER_IMAGE=$(python3 .github/parse_docker_compose.py worker -i) + echo "WORKER_IMAGE=$WORKER_IMAGE" >> $GITHUB_ENV + echo "WORKER Image: $WORKER_IMAGE" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Build Docker containers with Docker Compose + run: | + API_TAG=${{ env.API_IMAGE }} + WORKER_TAG=${{ env.WORKER_IMAGE }} + docker-compose build --no-cache + echo $API_TAG > api_tag.txt + echo $WORKER_TAG > worker_tag.txt + + - name: Upload image tag artifact + uses: actions/upload-artifact@v3 + with: + name: api_tag + path: api_tag.txt + + - name: Upload image tag artifact + uses: actions/upload-artifact@v3 + with: + name: worker_tag + path: worker_tag.txt + + diff --git a/.github/workflows/deployment.yml 
b/.github/workflows/deployment.yml new file mode 100644 index 000000000..7ada60774 --- /dev/null +++ b/.github/workflows/deployment.yml @@ -0,0 +1,79 @@ +name: Continuous Deployment + +on: + workflow_dispatch: + +jobs: + deploy-microservices: + runs-on: ubuntu-latest + steps: + # Step 1: Checkout repository + - name: Checkout repository + uses: actions/checkout@v3 + + # Step 2: Log in to GHCR + - name: Log in to GHCR + run: echo "${{ secrets.REPO_ADMIN_GHCR_TOKEN }}" | docker login ghcr.io -u "${{ secrets.REPO_ADMIN_GH_USERNAME }}" --password-stdin + + # Step 3: Install Docker Compose + - name: Install Docker Compose + run: | + sudo curl -L "https://github.com/docker/compose/releases/download/$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + docker-compose --version # Check if Docker Compose is installed correctly + + - name: Install Python and PyYAML + run: | + sudo apt-get update + sudo apt-get install -y python3 python3-pip + pip install pyyaml + + # Step 5: Extract API container version + - name: Extract API container version + id: extract_api_version + run: | + API_VERSION=$(python3 .github/parse_container_version.py api) + echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV + echo "API Version: $API_VERSION" + + # Step 6: Extract worker container version + - name: Extract worker container version + id: extract_worker_version + run: | + WORKER_VERSION=$(python3 .github/parse_container_version.py worker) + echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV + echo "WORKER Version: $WORKER_VERSION" + + # Step 7: Debug output of extracted versions + - name: Debug output of extracted versions + run: | + echo "API_VERSION=${{ env.API_VERSION }}" + echo "WORKER_VERSION=${{ env.WORKER_VERSION }}" + + # Step 8: Set up Docker Buildx + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + # Step 9: Build Docker containers with Docker Compose + - name: Build Docker containers with Docker Compose + run: docker-compose build --no-cache + + # Step 10: Change permissions for push script + - name: Change push script permissions + run: chmod +x ./assets/scripts/push_image.sh + + # Step 11: Deploy API microservice container to GHCR + - name: Deploy API microservice container to GHCR + run: | + ./assets/scripts/push_image.sh compose_api ${{ env.API_VERSION }} + env: + REPO_ADMIN_GH_USERNAME: ${{ secrets.REPO_ADMIN_GH_USERNAME }} + REPO_ADMIN_GHCR_TOKEN: ${{ secrets.REPO_ADMIN_GHCR_TOKEN }} + + # Step 12: Deploy Worker microservice container to GHCR + - name: Deploy worker microservice container to GHCR + run: | + ./assets/scripts/push_image.sh compose_worker ${{ env.WORKER_VERSION }} + env: + REPO_ADMIN_GH_USERNAME: ${{ secrets.REPO_ADMIN_GH_USERNAME }} + REPO_ADMIN_GHCR_TOKEN: ${{ secrets.REPO_ADMIN_GHCR_TOKEN }} diff --git a/compose_api/main.py b/compose_api/main.py index 0f4a769c5..940480d7e 100644 --- a/compose_api/main.py +++ b/compose_api/main.py @@ -34,16 +34,20 @@ APP_TITLE = "bio-compose" APP_VERSION = "0.1.0" -APP_SERVERS = [ - { - "url": "https://biochecknet.biosimulations.org", - "description": "Production server" - }, - { - "url": "http://localhost:8000", - "description": "Development server" - } -] +# APP_SERVERS = [ +# { +# "url": "https://biochecknet.biosimulations.org", +# "description": "Production server" +# }, +# { +# "url": "http://localhost:3001", +# "description": "Main Development server" +# }, 
+# { +# "url": "http://localhost:8000", +# "description": "Alternate Development server" +# } +# ] APP_ORIGINS = [ 'http://127.0.0.1:8000', @@ -71,7 +75,7 @@ # -- app components -- # router = APIRouter() -app = FastAPI(title=APP_TITLE, version=APP_VERSION, servers=APP_SERVERS) +app = FastAPI(title=APP_TITLE, version=APP_VERSION) # , servers=APP_SERVERS) # add origins app.add_middleware( @@ -82,7 +86,7 @@ allow_headers=["*"]) # add servers -app.servers = APP_SERVERS +# app.servers = APP_SERVERS # -- mongo db -- # @@ -284,12 +288,15 @@ async def verify_sbml( simulators: List[str] = Query(default=["amici", "copasi", "tellurium"], description="List of simulators to compare"), include_outputs: bool = Query(default=True, description="Whether to include the output data on which the comparison is based."), comparison_id: Optional[str] = Query(default=None, description="Descriptive prefix to be added to this submission's job ID."), - expected_results: UploadFile = File(default=None, description="reports.h5 file defining the expected results to be included in the comparison."), + # expected_results: Optional[UploadFile] = File(default=None, description="reports.h5 file defining the expected results to be included in the comparison."), rTol: Optional[float] = Query(default=None, description="Relative tolerance to use for proximity comparison."), aTol: Optional[float] = Query(default=None, description="Absolute tolerance to use for proximity comparison."), - selection_list: Optional[List[str]] = Query(default=None, description="List of observables to include in the return data."), + selection_list: Optional[List[str]] = Query(default=None, description="List of observables to include in the return data.") ) -> PendingSbmlVerificationJob: try: + expected_results = None + if isinstance(expected_results, str) and expected_results.strip() == "": + expected_results = None # request specific params if comparison_id is None: compare_id = "utc_comparison_sbml" @@ -313,13 +320,15 @@ async def verify_sbml( # Save uploaded reports file to Google Cloud Storage if applicable report_fp = None report_blob_dest = None - if expected_results: + if expected_results is not None: # handle incorrect files upload properly_formatted_report = check_upload_file_extension(expected_results, 'expected_results', '.h5') if properly_formatted_report: report_fp = await save_uploaded_file(expected_results, save_dest) report_blob_dest = upload_prefix + report_fp.split("/")[-1] upload_blob(bucket_name=BUCKET_NAME, source_file_name=report_fp, destination_blob_name=report_blob_dest) + else: + report_blob_dest = None report_location = report_blob_dest pending_job_doc = await db_connector.insert_job_async( @@ -343,7 +352,8 @@ async def verify_sbml( # clean up local temp files os.remove(fp) - return PendingSbmlVerificationJob(**pending_job_doc) + # return PendingSbmlVerificationJob(**pending_job_doc) + return pending_job_doc except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @@ -582,18 +592,25 @@ async def fetch_results(job_id: str): # state-case: job is completed job = await db_connector.read(collection_name="completed_jobs", job_id=job_id) + print('COMPLETED GOT JOB: ', job) + if job is not None: + job.pop('_id', None) + return {'content': job} # state-case: job has failed if job is None: job = await db_connector.read(collection_name="failed_jobs", job_id=job_id) + print('FAILED GOT JOB: ', job) # state-case: job is not in completed: if job is None: job = await db_connector.read(collection_name="in_progress_jobs", 
job_id=job_id) + print('IN_PROGRESS GOT JOB: ', job) # state-case: job is not in progress: if job is None: job = await db_connector.read(collection_name="pending_jobs", job_id=job_id) + print('GOT JOB: ', job) # return-case: job exists as either completed, failed, in_progress, or pending if not isinstance(job, type(None)): @@ -615,7 +632,8 @@ async def fetch_results(job_id: str): remote_fp = None if isinstance(job_data, list): - return OutputData(content=job) + # return OutputData(content=job) + return {'content': job} # output-type-case: output is saved as a dict if isinstance(job_data, dict): @@ -624,7 +642,8 @@ async def fetch_results(job_id: str): remote_fp = job_data['results_file'] # status/output-case: job is complete and output content is raw data and so return the full data TODO: do something better here else: - return OutputData(content=job) + # return OutputData(content=job) + return {'content': job} # output-type-case: output is saved as flattened (str) and thus also a file download elif isinstance(job_data, str): @@ -635,7 +654,8 @@ async def fetch_results(job_id: str): temp_dest = mkdtemp() local_fp = download_file_from_bucket(source_blob_path=remote_fp, out_dir=temp_dest, bucket_name=BUCKET_NAME) - return FileResponse(path=local_fp, media_type="application/octet-stream", filename=local_fp.split("/")[-1]) + # return FileResponse(path=local_fp, media_type="application/octet-stream", filename=local_fp.split("/")[-1]) + return {'path': local_fp, 'media_type': 'application/octet-stream', 'filename': local_fp.split('/')[-1]} # status/content-case: job is either pending or in progress and does not contain files to download else: @@ -643,7 +663,8 @@ async def fetch_results(job_id: str): status = job['status'] job['status'] = 'SUBMITTED:' + status - return OutputData(content=job) + # return OutputData(content=job) + return {'content': job} # return-case: no job exists in any collection by that id else: diff --git a/compose_api/openapi_spec.py b/compose_api/openapi_spec.py index 88cec4f8c..fa7d46054 100644 --- a/compose_api/openapi_spec.py +++ b/compose_api/openapi_spec.py @@ -13,8 +13,8 @@ def main(): version=app.version, openapi_version=app.openapi_version, description=app.description, - routes=app.routes, - servers=app.servers + routes=app.routes + # servers=app.servers ) # Convert the JSON OpenAPI spec to YAML diff --git a/compose_api/spec/openapi_3_1_0_generated.yaml b/compose_api/spec/openapi_3_1_0_generated.yaml index 2bd8d0ac7..40f205574 100644 --- a/compose_api/spec/openapi_3_1_0_generated.yaml +++ b/compose_api/spec/openapi_3_1_0_generated.yaml @@ -2,11 +2,6 @@ openapi: 3.1.0 info: title: bio-compose version: 0.1.0 -servers: -- url: https://biochecknet.biosimulations.org - description: Production server -- url: http://localhost:8000 - description: Development server paths: /: get: @@ -632,12 +627,6 @@ components: format: binary title: Uploaded File description: A deterministic SBML model. - expected_results: - type: string - format: binary - title: Expected Results - description: reports.h5 file defining the expected results to be included - in the comparison. 
type: object required: - uploaded_file diff --git a/docker-compose.yaml b/docker-compose.yaml index f504af68e..024574628 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,4 +1,4 @@ -version: '3.8' +# version: '3.8' services: mongodb: @@ -15,7 +15,7 @@ services: build: context: ./compose_api dockerfile: ./Dockerfile-compose_api - image: ghcr.io/biosimulators/bio-check-compose_api:1.11.1 + image: ghcr.io/biosimulators/bio-check-compose_api:1.11.2 # .1 CURRENT container_name: api environment: - MONGO_URI=mongodb://mongodb:27017/?retryWrites=true&w=majority&appName=bio-compose @@ -34,7 +34,7 @@ services: context: ./compose_worker dockerfile: ./Dockerfile-compose_worker - image: ghcr.io/biosimulators/bio-check-compose_worker:1.11.1 + image: ghcr.io/biosimulators/bio-check-compose_worker:1.11.2 container_name: worker environment: - MONGO_URI=mongodb://mongodb:27017/?retryWrites=true&w=majority&appName=bio-compose diff --git a/model-examples/sbml-core/BorisEJB.xml b/model-examples/sbml-core/BorisEJB.xml new file mode 100644 index 000000000..08c4fe53e --- /dev/null +++ b/model-examples/sbml-core/BorisEJB.xml @@ -0,0 +1,1737 @@
+ [BorisEJB.xml — new 1,737-line SBML model file; only MathML identifiers are recoverable here: a MAPK cascade over species MKKK, MKKK_P, MKK, MKK_P, MKK_PP, MAPK, MAPK_P, MAPK_PP, with kinetic-law parameters J0_V1, J0_Ki, J0_n, J0_K1, J1_V2, J1_KK2, J2_k3, J2_KK3, J3_k4, J3_KK4, J4_V5, J4_KK5, J5_V6, J5_KK6, J6_k7, J6_KK7, J7_k8, J7_KK8, J8_V9, J8_KK9, J9_V10, J9_KK10; full XML omitted.]
\ No newline at end of file
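
Both workflows above rely on `python3 .github/parse_container_version.py <service>` printing a service's image version so it can be captured into `$GITHUB_ENV`. Only the first few lines of that script appear in this patch (it reads `docker-compose.yaml` and takes the service name from `sys.argv[1]`), so the following is a minimal sketch of the behavior the workflows appear to depend on; the YAML lookup and tag parsing are assumptions, not the repository's actual implementation.

```python
# Hedged sketch of .github/parse_container_version.py's presumed behavior:
# print the image tag for the requested service from docker-compose.yaml.
import sys
import yaml  # PyYAML, installed in the workflow with `pip install pyyaml`

service_name = sys.argv[1]   # e.g. "api" or "worker"
fp = 'docker-compose.yaml'

with open(fp, 'r') as file:
    compose = yaml.safe_load(file)

# e.g. "ghcr.io/biosimulators/bio-check-compose_api:1.11.2"
image = compose['services'][service_name]['image']
version = image.rsplit(':', 1)[-1]  # tag after the last colon

print(version)  # captured by the workflow as API_VERSION / WORKER_VERSION
```

(The `delivery.yml` workflow calls `parse_docker_compose.py` with `-v`/`-i` flags instead; that script is not part of this patch, but presumably resolves the same `image:` entries.)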
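The deployment job likewise calls `./assets/scripts/push_image.sh <service> <version>` with `REPO_ADMIN_GH_USERNAME` and `REPO_ADMIN_GHCR_TOKEN` in the environment, but that script is not included in this diff. As a rough, non-authoritative illustration of what such a push step typically does (the `ghcr.io/biosimulators/bio-check-<service>:<version>` naming is inferred from docker-compose.yaml), a Python equivalent might look like:

```python
# Hypothetical stand-in for assets/scripts/push_image.sh (not shown in this patch).
# Invoked as: push_image.py <service> <version>, e.g. push_image.py compose_api 1.11.2
import os
import subprocess
import sys

service, version = sys.argv[1], sys.argv[2]
username = os.environ['REPO_ADMIN_GH_USERNAME']
token = os.environ['REPO_ADMIN_GHCR_TOKEN']

# Image naming inferred from docker-compose.yaml; an assumption about the real script.
image = f"ghcr.io/biosimulators/bio-check-{service}:{version}"

# Non-interactive GHCR login, then push the locally built image.
subprocess.run(['docker', 'login', 'ghcr.io', '-u', username, '--password-stdin'],
               input=token.encode(), check=True)
subprocess.run(['docker', 'push', image], check=True)
```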