diff --git a/.development.env b/.development.env index ad42034..75b6f46 100644 --- a/.development.env +++ b/.development.env @@ -1,3 +1,5 @@ +HOST=localhost + LTS_FOLDER=./lts MOCKARCHIVER_IMAGE=ghcr.io/swissopenem/scopemarchiver-mockarchiver:latest \ No newline at end of file diff --git a/.github/workflows/build-docker-images.yml b/.github/workflows/build-docker-images.yml index a3c81ce..65d5a67 100644 --- a/.github/workflows/build-docker-images.yml +++ b/.github/workflows/build-docker-images.yml @@ -7,9 +7,9 @@ env: # github.repository as / MOCKARCHIVER_IMAGE_NAME: ${{ github.repository }}-mockarchiver WEBUPLOADER_IMAGE_NAME: ${{ github.repository }}-webuploader - CELERY_WORKER_IMAGE_NAME: ${{ github.repository }}-worker - CELERY_SERVER_IMAGE_NAME: ${{ github.repository }}-server - CELERY_FLOWER_IMAGE_NAME: ${{ github.repository }}-flower + CELERY_WORKER_IMAGE_NAME: ${{ github.repository }}-openem-worker + OPENEM_BACKEND_IMAGE_NAME: ${{ github.repository }}-openem-backend + CELERY_FLOWER_IMAGE_NAME: ${{ github.repository }}-openem-flower on: push: @@ -28,7 +28,7 @@ jobs: outputs: mockarchiver: ${{ steps.filter.outputs.mockarchiver }} webuploader: ${{ steps.filter.outputs.webuploader }} - jobsystem: ${{ steps.filter.outputs.jobsystem }} + backend: ${{ steps.filter.outputs.backend }} steps: # For pull requests it's not necessary to checkout the code - uses: actions/checkout@v4 @@ -41,8 +41,8 @@ jobs: - 'mockarchiver/**' webuploader: - 'webuploader/**' - jobsystem: - - 'jobsystem/**' + backend: + - 'backend/**' build_mockarchiver: runs-on: ubuntu-latest @@ -93,7 +93,7 @@ jobs: contents: read needs: detect_changes - if: ${{ needs.detect_changes.outputs.jobsystem == 'true' }} || github.event_name != 'pull_request' + if: ${{ needs.detect_changes.outputs.backend == 'true' }} || github.event_name != 'pull_request' steps: - uses: actions/checkout@v3 @@ -121,13 +121,13 @@ jobs: id: build-and-push uses: docker/build-push-action@v5 with: - context: ./jobsystem - file: ./jobsystem/worker.Dockerfile + context: ./backend + file: ./backend/worker.Dockerfile push: true # ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - build_celery_server: + build_openem_backend: runs-on: ubuntu-latest permissions: @@ -135,7 +135,7 @@ jobs: contents: read needs: detect_changes - if: ${{ needs.detect_changes.outputs.jobsystem == 'true' }} || github.event_name != 'pull_request' + if: ${{ needs.detect_changes.outputs.backend == 'true' }} || github.event_name != 'pull_request' steps: - uses: actions/checkout@v3 @@ -155,7 +155,7 @@ jobs: id: meta uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 with: - images: ${{ env.REGISTRY }}/${{ env.CELERY_SERVER_IMAGE_NAME }} + images: ${{ env.REGISTRY }}/${{ env.OPENEM_BACKEND_IMAGE_NAME }} # Build and push Docker image with Buildx (don't push on PR) # https://github.com/docker/build-push-action @@ -163,8 +163,8 @@ jobs: id: build-and-push uses: docker/build-push-action@v5 with: - context: ./jobsystem - file: ./jobsystem/web.Dockerfile + context: ./backend + file: ./backend/web.Dockerfile push: true # ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} @@ -177,7 +177,7 @@ jobs: contents: read needs: detect_changes - if: ${{ needs.detect_changes.outputs.jobsystem == 'true' }} || github.event_name != 'pull_request' + if: ${{ needs.detect_changes.outputs.backend == 'true' }} || github.event_name != 'pull_request' 
steps: - uses: actions/checkout@v3 @@ -205,8 +205,8 @@ jobs: id: build-and-push uses: docker/build-push-action@v5 with: - context: ./jobsystem - file: ./jobsystem/flower.Dockerfile + context: ./backend + file: ./backend/flower.Dockerfile push: true # ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} diff --git a/.env b/.production.env similarity index 56% rename from .env rename to .production.env index f89d93c..ff19970 100644 --- a/.env +++ b/.production.env @@ -1,10 +1,19 @@ # Global HOST=openem-dev.ethz.ch +ARCHIVER_ENABLED=True +INGESTER_ENABLED=True -# TUSD -TUSD_HOOKS_FOLDER=./tusd/hooks -# Minio +# Backend +CELERY_FLOWER_IMAGE_NAME=ghcr.io/swissopenem/openem-celery-flower +CELERY_WORKER_IMAGE_NAME=ghcr.io/swissopenem/openem-celery-worker +OPENEM_BACKEND_IMAGE_NAME=ghcr.io/swissopenem/openem-backend +OPENEM_IMAGE_TAG=main-local + +# Archiver +CELERY_ARCHIVING_DIR=/data/LTS + +## Minio MINIO_STORAGE_FOLDER=/data/minio MINIO_REGION="eu-west-1" MINIO_USER="minioadmin" @@ -12,17 +21,13 @@ MINIO_PASS="minioadmin" MINIO_ARCHIVAL_BUCKET="archival.openem-dev.ethz.ch" MINIO_RETRIEVAL_BUCKET="retrieval.openem-dev.ethz.ch" -# Mockarchiver +## TUSD +TUSD_HOOKS_FOLDER=./tusd/hooks + +## Mockarchiver LTS_FOLDER=/data/lts MOCKARCHIVER_IMAGE=ghcr.io/swissopenem/scopemarchiver-mockarchiver:latest -# Webuploader +## Webuploader WEBUPLOADER_IMAGE_NAME=ghcr.io/swissopenem/scopemarchiver-webuploader -WEBUPLOADER_IMAGE_TAG=main - -# Celery -CELERY_FLOWER_IMAGE_NAME=ghcr.io/swissopenem/scopemarchiver-flower -CELERY_WORKER_IMAGE_NAME=ghcr.io/swissopenem/scopemarchiver-worker -CELERY_SERVER_IMAGE_NAME=ghcr.io/swissopenem/scopemarchiver-server -CELERY_IMAGE_TAG=main -CELERY_ARCHIVING_DIR=/data/LTS +WEBUPLOADER_IMAGE_TAG=main \ No newline at end of file diff --git a/README.md b/README.md index 997bcae..a704fb3 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,56 @@ -# ScopeMArchiver +# OpenEM Service -A archiving servce that allows uploading data and registering it with [SciCat](https://scicatproject.github.io). +An ingester and archiver service that allows uploading data and registering it with [SciCat](https://scicatproject.github.io). -## Mockarchiver +## Development + +```bash +docker compose --env-file .production.env --env-file .development.env up -d +``` + +> **Note:** .env files are picked up by VSCode and variables defined there are added to the shell that is used. This can lead to confusion, as the files are not reloaded after changing values and the values already set in the shell session take precedence. + +## Archiver + +The archiver functionality can be enabled by setting the following env variable: + +```bash +ARCHIVER_ENABLED=True +``` + +### Mockarchiver Python based service that mocks behavior of the LTS at ETH. See its [Readme](./mockarchiver/README.me) for details.
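The `ARCHIVER_ENABLED` / `INGESTER_ENABLED` flags introduced above are consumed by the new backend further down in this diff (`backend/main.py` and `backend/jobsystem/celery.py`) via `fastapi-featureflags`. A minimal sketch of that wiring, limited to the calls that actually appear in the diff:

```python
import os

from fastapi import FastAPI
from fastapi_featureflags import FeatureFlags, feature_enabled

# Flags are loaded straight from the environment, so ARCHIVER_ENABLED=True in
# .production.env (or in a compose service's environment) switches the archiver on.
FeatureFlags.load_conf_from_dict(dict(os.environ.items()))

app = FastAPI(root_path=os.environ.get("API_ROOT_PATH", "/"))

if feature_enabled("ARCHIVER_ENABLED"):
    import archiver.api as archiver_api
    app.include_router(archiver_api.router)

if feature_enabled("INGESTER_ENABLED"):
    import ingester.api as ingester_api
    app.include_router(ingester_api.router)
```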
+## Ingester + +The ingester functionality can be enabled by setting the following env variable: + +```bash +INGESTER_ENABLED=True +``` + ## Deployment -All the services can be deployed using docker compose: +Production: + +```bash +docker compose --env_file .production.env up -d +``` + +Development: ```bash -docker compose up -d +docker compose --env-file .production.env --env-file .development.env --profile archiver up -d ``` -> Note: Individual image tags need to be passed as env variables +## Endpoints + +Deploying it locally for development provide the following endpoints + +| Service | Endpoint | +|-- |-- | +| Archiver Frontend | | +| Traefik | | +| Jobs API | | diff --git a/backend/.vscode/launch.json b/backend/.vscode/launch.json new file mode 100644 index 0000000..e1fa037 --- /dev/null +++ b/backend/.vscode/launch.json @@ -0,0 +1,50 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Backend API", + "type": "debugpy", + "request": "launch", + "env": { + "ARCHIVER_ENABLED": "True", + "INGESTER_ENABLED": "True" + }, + "module": "uvicorn", + "args": [ + "main:app", + "--reload" + ], + "jinja": true + }, + { + "name": "Worker", + "type": "debugpy", + "request": "launch", + "env": { + "ARCHIVER_ENABLED": "True", + "INGESTER_ENABLED": "True" + }, + "module": "celery", + "args": [ + "-A", + "jobsystem.celery", + "worker" + ], + "jinja": true + } + ], + "compounds": [ + { + "name": "Backend", + "configurations": [ + "Backend API", + "Worker" + ], + "preLaunchTask": "${defaultBuildTask}", + "stopAll": true + } + ] +} \ No newline at end of file diff --git a/jobsystem/Pipfile b/backend/Pipfile similarity index 91% rename from jobsystem/Pipfile rename to backend/Pipfile index 4b10168..2bb4456 100644 --- a/jobsystem/Pipfile +++ b/backend/Pipfile @@ -13,6 +13,7 @@ uvicorn = "*" celery = {extras = ["redis"], version = "*"} pytest-celery = "*" pytest = "*" +fastapi-featureflags = "*" [dev-packages] diff --git a/jobsystem/Pipfile.lock b/backend/Pipfile.lock similarity index 78% rename from jobsystem/Pipfile.lock rename to backend/Pipfile.lock index 4ef52e1..9c4f212 100644 --- a/jobsystem/Pipfile.lock +++ b/backend/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e15caa2ea46de1adadb0d1377336cd41bda3ae6d39d66ec22c345c3dc7e68cfd" + "sha256": "40726878ce73a1346863fb0c498ee676df9b5880924296ed2bc4040d182c8858" }, "pipfile-spec": 6, "requires": { @@ -41,14 +41,6 @@ "markers": "python_version >= '3.6'", "version": "==5.2.0" }, - "annotated-types": { - "hashes": [ - "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43", - "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d" - ], - "markers": "python_version >= '3.8'", - "version": "==0.6.0" - }, "anyio": { "hashes": [ "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", @@ -97,7 +89,7 @@ "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version < '3.11.3'", "version": "==4.0.3" }, "billiard": { @@ -322,12 +314,21 @@ }, "fastapi": { "hashes": [ - "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3", - "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b" + 
"sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982", + "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==0.110.0" + "markers": "python_version >= '3.7'", + "version": "==0.95.2" + }, + "fastapi-featureflags": { + "hashes": [ + "sha256:00c4802f1729ee7f5c4a58d7df663f3925a68d36ed86e00ee2207f327322f344", + "sha256:de8d7e410cf97f7c15399d458daebb50212059a3af51a2b31dc2104c3b99c919" + ], + "index": "pypi", + "markers": "python_version <= '3.12' and python_version >= '3.7'", + "version": "==0.4.8" }, "flower": { "hashes": [ @@ -394,11 +395,11 @@ }, "packaging": { "hashes": [ - "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", - "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" + "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", + "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" ], "markers": "python_version >= '3.7'", - "version": "==23.2" + "version": "==24.0" }, "pluggy": { "hashes": [ @@ -471,105 +472,54 @@ }, "pydantic": { "hashes": [ - "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a", - "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f" - ], - "markers": "python_version >= '3.8'", - "version": "==2.6.3" - }, - "pydantic-core": { - "hashes": [ - "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a", - "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed", - "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979", - "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff", - "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5", - "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45", - "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340", - "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad", - "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23", - "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6", - "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7", - "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241", - "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda", - "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187", - "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba", - "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c", - "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2", - "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c", - "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132", - "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf", - "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972", - "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db", - "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade", - "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4", - "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8", - "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f", - "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9", - 
"sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48", - "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec", - "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d", - "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9", - "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb", - "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4", - "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89", - "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c", - "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9", - "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da", - "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac", - "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b", - "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf", - "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e", - "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137", - "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1", - "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b", - "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8", - "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e", - "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053", - "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01", - "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe", - "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd", - "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805", - "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183", - "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8", - "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99", - "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820", - "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074", - "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256", - "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8", - "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975", - "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad", - "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e", - "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca", - "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df", - "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b", - "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a", - "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a", - "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721", - "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a", - "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f", - "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2", - "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97", - "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6", - "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed", - 
"sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc", - "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1", - "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe", - "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120", - "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f", - "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a" + "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8", + "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f", + "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f", + "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593", + "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046", + "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9", + "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf", + "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea", + "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022", + "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca", + "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f", + "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6", + "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597", + "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f", + "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee", + "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c", + "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7", + "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e", + "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054", + "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d", + "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87", + "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c", + "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7", + "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5", + "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663", + "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01", + "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe", + "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc", + "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee", + "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4", + "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c", + "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347", + "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a", + "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f", + "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a", + "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b" ], - "markers": "python_version >= '3.8'", - "version": "==2.16.3" + "markers": "python_version >= '3.7'", + "version": "==1.10.14" }, "pytest": { "hashes": [ - "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd", - "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096" + 
"sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7", + "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==8.0.2" + "version": "==8.1.1" }, "pytest-celery": { "hashes": [ @@ -596,10 +546,10 @@ }, "redis": { "hashes": [ - "sha256:3f82cc80d350e93042c8e6e7a5d0596e4dd68715babffba79492733e1f367037", - "sha256:4caa8e1fcb6f3c0ef28dba99535101d80934b7d4cd541bbb47f4a3826ee472d1" + "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580", + "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d" ], - "version": "==5.0.2" + "version": "==5.0.3" }, "requests": { "hashes": [ @@ -628,11 +578,11 @@ }, "starlette": { "hashes": [ - "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044", - "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080" + "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75", + "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91" ], - "markers": "python_version >= '3.8'", - "version": "==0.36.3" + "markers": "python_version >= '3.7'", + "version": "==0.27.0" }, "tomli": { "hashes": [ @@ -685,12 +635,12 @@ }, "uvicorn": { "hashes": [ - "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a", - "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4" + "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1", + "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==0.27.1" + "version": "==0.28.0" }, "vine": { "hashes": [ diff --git a/backend/archiver/__init__.py b/backend/archiver/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/jobsystem/main.py b/backend/archiver/api.py similarity index 55% rename from jobsystem/main.py rename to backend/archiver/api.py index ff7ae5b..3d22860 100644 --- a/jobsystem/main.py +++ b/backend/archiver/api.py @@ -1,40 +1,15 @@ -from fastapi import FastAPI -from fastapi.responses import JSONResponse -from fastapi.middleware.cors import CORSMiddleware - -from working_storage_interface import minioClient - -from pydantic import BaseModel - - -import tasks - +from fastapi import APIRouter -class ArchiveJob(BaseModel): - filename: str +from .working_storage_interface import minioClient +from fastapi.responses import JSONResponse +import archiver.tasks as tasks +from archiver.model import ArchiveJob, Object -class Object(BaseModel): - object_name: str - - -app = FastAPI() - -origins = [ - "http://127.0.0.1*", - "http://localhost:5173", -] - -app.add_middleware( - CORSMiddleware, - allow_origins=origins, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) +router = APIRouter() -@app.get("/tasks", status_code=201) +@router.get("/tasks", status_code=201) def run_task(): try: task = tasks.create_archiving_pipeline() @@ -44,13 +19,13 @@ def run_task(): return JSONResponse(status_code=500) -@app.get("/archivable_objects") +@router.get("/archivable_objects") def get_archivable_objects() -> list[Object]: objects = minioClient.get_objects(bucket=minioClient.ARCHIVAL_BUCKET) return [Object(object_name=o.object_name) for o in objects] -@app.post("/archiving/") +@router.post("/archiving/") async def create_archive_job(job: ArchiveJob): try: j = ArchiveJob.model_validate(job) diff --git a/backend/archiver/model.py b/backend/archiver/model.py new file mode 100644 
index 0000000..dbdefb3 --- /dev/null +++ b/backend/archiver/model.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel + + +class ArchiveJob(BaseModel): + filename: str + + +class Object(BaseModel): + object_name: str diff --git a/jobsystem/tasks.py b/backend/archiver/tasks.py similarity index 98% rename from jobsystem/tasks.py rename to backend/archiver/tasks.py index ca01f9f..d4ff4b8 100644 --- a/jobsystem/tasks.py +++ b/backend/archiver/tasks.py @@ -2,7 +2,7 @@ import os -from working_storage_interface import minioClient +from .working_storage_interface import minioClient import logging import requests import time diff --git a/jobsystem/working_storage_interface.py b/backend/archiver/working_storage_interface.py similarity index 87% rename from jobsystem/working_storage_interface.py rename to backend/archiver/working_storage_interface.py index 328a6f1..8a5917c 100644 --- a/jobsystem/working_storage_interface.py +++ b/backend/archiver/working_storage_interface.py @@ -24,10 +24,10 @@ def list_archiveable_objects(self) -> List[str]: class MinioStorage(WorkingStorage): - _USER = os.environ.get('MINIO_USER') - _PASSWORD = os.environ.get('MINIO_PASS') - _REGION = os.environ.get('MINIO_REGION') - _URL = os.environ.get('MINIO_URL') + _USER = os.environ.get('MINIO_USER', "minioadmin") + _PASSWORD = os.environ.get('MINIO_PASS', "minioadmin") + _REGION = os.environ.get('MINIO_REGION', "eu-west-1") + _URL = os.environ.get('MINIO_URL', "localhost:9000") ARCHIVAL_BUCKET: Bucket = Bucket( os.environ.get('MINIO_ARCHIVAL_BUCKET', "archival")) diff --git a/backend/bin/env.sh b/backend/bin/env.sh deleted file mode 100755 index 9f03495..0000000 --- a/backend/bin/env.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -export ROOT_DIR=${SCRIPT_DIR}/.. -export eval $(cat ${ROOT_DIR}/.env.development | grep -v '^#' | grep -v '^\s*$' | sed 's/\${\([^}]*\)}/\$\1/g' | xargs) -eval export eval $(cat ${ROOT_DIR}/.env.development | grep -v '^#' | grep -v '^\s*$' | sed 's/\${\([^}]*\)}/\$\1/g' | xargs) - diff --git a/backend/db/create_db.sh b/backend/db/create_db.sh deleted file mode 100644 index fca7aa2..0000000 --- a/backend/db/create_db.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -export ROOT_DIR=${SCRIPT_DIR}/.. 
-export $(cat ${ROOT_DIR}/.env | grep -v '^#' | grep -v '^\s*$' | sed 's/\${\([^}]*\)}/\$\1/g' | xargs) -cd $ROOT_DIR - -if [ $USER = $POSTGRES_USER ]; then - psql="psql" -else - psql="sudo -u $POSTGRES_USER psql" -fi - -if [ -z $DATABASE ]; then - echo "DATABASE variable not set - exiting" - exit 1 -fi - -$psql -c "DROP DATABASE IF EXISTS $DATABASE" postgres -$psql -c "DROP USER IF EXISTS $DBOWNER" postgres -$psql -c "DROP USER IF EXISTS $DBUSER" postgres -$psql -c "CREATE DATABASE $DATABASE" postgres -$psql -c "CREATE USER $DBOWNER WITH PASSWORD '$DBOWNERPASSWORD';" postgres -$psql -c "CREATE USER $DBUSER WITH PASSWORD '$DBPASSWORD';" postgres -$psql -c "ALTER ROLE $DBOWNER SET client_encoding TO 'utf8'; ALTER ROLE $DBOWNER SET timezone TO 'UTC';" postgres -$psql -c "ALTER DATABASE $DATABASE OWNER TO $DBOWNER;" postgres -$psql -c "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO $DBUSER" $DATABASE -$psql -c "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, UPDATE ON SEQUENCES TO $DBUSER" $DATABASE -#python3 manage.py migrate -#sudo -u vagrant bash -c "/var/www/coffeeshopsite/create_users.sh" -#sudo -u vagrant bash -c "/var/www/coffeeshopsite/loaddata.sh" -#sudo -u vagrant bash -c "/var/www/coffeeshopsite/collectstatic.sh" diff --git a/jobsystem/flower.Dockerfile b/backend/flower.Dockerfile similarity index 90% rename from jobsystem/flower.Dockerfile rename to backend/flower.Dockerfile index 670aa8d..2cea387 100644 --- a/jobsystem/flower.Dockerfile +++ b/backend/flower.Dockerfile @@ -29,4 +29,4 @@ RUN PIPENV_VENV_IN_PROJECT=1 pipenv install --deploy RUN PIPENV_VENV_IN_PROJECT=1 pipenv install flower -CMD ["pipenv", "run", "celery", "-A", "tasks", "flower"] \ No newline at end of file +CMD ["pipenv", "run", "celery", "-A", "jobsystem.celery", "flower"] \ No newline at end of file diff --git a/backend/ingester/api.py b/backend/ingester/api.py new file mode 100644 index 0000000..b552a89 --- /dev/null +++ b/backend/ingester/api.py @@ -0,0 +1,17 @@ +from fastapi import APIRouter + + +from fastapi.responses import JSONResponse +import ingester.tasks as tasks + +router = APIRouter() + + +@router.get("/ingester_test_route", status_code=200) +def test_route(): + return JSONResponse({"message": "hello"}) + + +@router.get("/ingester_test_task", status_code=200) +def test_route(): + tasks.dummy_task.delay() diff --git a/backend/ingester/tasks.py b/backend/ingester/tasks.py new file mode 100644 index 0000000..ee1932b --- /dev/null +++ b/backend/ingester/tasks.py @@ -0,0 +1,7 @@ +from celery import Celery, chain, shared_task +import time + + +@shared_task +def dummy_task(): + time.sleep(10) diff --git a/backend/jobsystem/celery.py b/backend/jobsystem/celery.py new file mode 100644 index 0000000..641467e --- /dev/null +++ b/backend/jobsystem/celery.py @@ -0,0 +1,17 @@ +from celery import Celery +import os +import logging +from fastapi_featureflags import FeatureFlags, feature_flag, feature_enabled + +FeatureFlags.load_conf_from_dict(dict(os.environ.items())) + +if feature_enabled("ARCHIVER_ENABLED"): + import archiver.tasks +if feature_enabled("INGESTER_ENABLED"): + import ingester.tasks + +_LOGGER = logging.getLogger("Jobs") + +celery_app = Celery('tasks', + broker=os.environ.get('CELERY_BROKER_URL'), + backend=os.environ.get('CELERY_RESULT_BACKEND')) diff --git a/backend/main.py b/backend/main.py index d960cbf..5175634 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,23 +1,30 @@ +import os from fastapi import FastAPI -from pydantic import 
BaseModel +from fastapi.middleware.cors import CORSMiddleware -app = FastAPI() +from fastapi_featureflags import FeatureFlags, feature_flag, feature_enabled +FeatureFlags.load_conf_from_dict(dict(os.environ.items())) -@app.get("/") -async def root(): - return {"message": "Hello World"} +app = FastAPI(root_path=os.environ.get('API_ROOT_PATH', '/')) -@app.get("/items/{item_id}") -async def read_item(item_id: int): - return {"item_id": item_id} +if feature_enabled("ARCHIVER_ENABLED"): + import archiver.api as archiver_api + app.include_router(archiver_api.router) +if feature_enabled("INGESTER_ENABLED"): + import ingester.api as ingester_api + app.include_router(ingester_api.router) -class Item(BaseModel): - name: str - description: str | None = None - price: float - tax: float | None = None -@app.post("/items/") -async def create_item(item: Item): - return item +origins = [ + "http://127.0.0.1*", + "http://localhost:5173", +] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) diff --git a/backend/requirements.txt b/backend/requirements.txt deleted file mode 100644 index 914057c..0000000 --- a/backend/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -psycopg2-binary -python_dotenv -regex -fastapi[all] diff --git a/jobsystem/web.Dockerfile b/backend/web.Dockerfile similarity index 94% rename from jobsystem/web.Dockerfile rename to backend/web.Dockerfile index 50fb77a..f380595 100644 --- a/jobsystem/web.Dockerfile +++ b/backend/web.Dockerfile @@ -24,4 +24,4 @@ RUN PATH="${HOME}/.local/bin:$PATH" RUN PIPENV_VENV_IN_PROJECT=1 pipenv install --deploy -CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--host", "0.0.0.0", "--root-path", "/fastapi"] \ No newline at end of file +CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--host", "0.0.0.0"] \ No newline at end of file diff --git a/jobsystem/worker.Dockerfile b/backend/worker.Dockerfile similarity index 89% rename from jobsystem/worker.Dockerfile rename to backend/worker.Dockerfile index 0f68b7c..ab80b3e 100644 --- a/jobsystem/worker.Dockerfile +++ b/backend/worker.Dockerfile @@ -26,4 +26,4 @@ RUN PATH="${HOME}/.local/bin:$PATH" RUN PIPENV_VENV_IN_PROJECT=1 pipenv install --deploy -CMD ["pipenv", "run", "celery", "-A", "tasks", "worker"] \ No newline at end of file +CMD ["pipenv", "run", "celery", "-A", "jobsystem.celery", "worker"] \ No newline at end of file diff --git a/docker-compose-development.yml b/docker-compose-development.yml deleted file mode 100644 index 4b1cff9..0000000 --- a/docker-compose-development.yml +++ /dev/null @@ -1,19 +0,0 @@ -version: "3.9" -env_file: - path: ./.development.env -services: - mockarchiver: - image: ${MOCKARCHIVER_IMAGE} - cap_add: - - SYS_ADMIN - devices: - - /dev/fuse - security_opt: - - apparmor:unconfined - build: - context: ./mockarchiver - dockerfile: dockerfile - volumes: - - ${LTS_FOLDER}:/mnt/lts - ports: - - 7000:7000 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 5cfb644..3e2effd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: "3.9" include: - jobs.docker-compose.yml - - monitoring.docker-compose.yml + # - monitoring.docker-compose.yml services: traefik: image: "traefik:v2.10" @@ -22,11 +22,11 @@ services: - "/var/run/docker.sock:/var/run/docker.sock:ro" labels: - "traefik.enable=true" - - "traefik.http.routers.mydashboard.rule=PathPrefix(`/traefik`) || PathPrefix(`/dashboard`) || PathPrefix(`/api`)" - - 
"traefik.http.middlewares.mydashboard-strip.stripprefix.prefixes=/traefik" + - "traefik.http.routers.mydashboard.rule=Host(`traefik.${HOST}`) && PathPrefix(`/dashboard`, `/api`)" + # - "traefik.http.middlewares.mydashboard-strip.stripprefix.prefixes=/traefik" - "traefik.http.routers.mydashboard.service=api@internal" - "traefik.http.routers.mydashboard.entrypoints=web" - - "traefik.http.routers.mydashboard.middlewares=mydashboard-strip" + # - "traefik.http.routers.mydashboard.middlewares=mydashboard-strip" - "traefik.frontend.headers.passHostHeader=true" tusd: @@ -60,6 +60,8 @@ services: - AWS_ACCESS_KEY_ID=${MINIO_USER} - AWS_SECRET_ACCESS_KEY=${MINIO_PASS} - AWS_REGION=eu-west-1 + profiles: + - tus minio: image: minio/minio container_name: minio @@ -99,12 +101,17 @@ services: - "traefik.http.routers.minio-api.service=minio-api" - "traefik.http.routers.minio-api.middlewares=minio-api-stripprefix,minio-headers,minio-cors" - "traefik.http.services.minio-api.loadbalancer.server.port=9000" + profiles: + - archiver minio-create-bucket: image: minio/mc depends_on: - minio entrypoint: > /bin/sh -c " /usr/bin/mc alias set local-minio http://minio:9000 ${MINIO_USER} ${MINIO_PASS}; /usr/bin/mc mb -p local-minio/${MINIO_ARCHIVAL_BUCKET}; /usr/bin/mc mb -p local-minio/${MINIO_RETRIEVAL_BUCKET}; exit 0; " + profiles: + - archiver + frontend: image: ${WEBUPLOADER_IMAGE_NAME}:${WEBUPLOADER_IMAGE_TAG} container_name: frontend @@ -122,6 +129,36 @@ services: - "traefik.http.routers.webuploader.entrypoints=web" - "traefik.http.services.webuploader.loadbalancer.server.port=3000" + backend: + image: ${OPENEM_BACKEND_IMAGE_NAME}:${OPENEM_IMAGE_TAG} + container_name: backend + build: + context: ./backend + dockerfile: ./web.Dockerfile + ports: + - 8004:8000 + environment: + - CELERY_BROKER_URL=amqp://guest:guest@rabbitmq + - CELERY_RESULT_BACKEND=redis://redis:6379 + - MINIO_REGION=${MINIO_REGION} + - MINIO_USER=${MINIO_USER} + - MINIO_PASS=${MINIO_PASS} + - MINIO_ARCHIVAL_BUCKET=${MINIO_ARCHIVAL_BUCKET} + - MINIO_RETRIEVAL_BUCKET=${MINIO_RETRIEVAL_BUCKET} + - MINIO_URL=minio:9000 + - API_ROOT_PATH=/api/v1 + - ARCHIVER_ENABLED=${ARCHIVER_ENABLED} + - INGESTER_ENABLED=${INGESTER_ENABLED} + depends_on: + - rabbitmq + - redis + labels: + - "traefik.enable=true" + - "traefik.http.routers.backend.rule=Host(`${HOST}`) && PathPrefix(`/api/v1`)" + - "traefik.http.middlewares.backend-stripprefix.stripprefix.prefixes=/api/v1" + - "traefik.http.routers.backend.entrypoints=web" + - "traefik.http.routers.backend.middlewares=backend-stripprefix" + - "traefik.http.services.backend.loadbalancer.server.port=8000" uppy-companion: image: transloadit/companion container_name: uppy-companion @@ -159,3 +196,5 @@ services: - "traefik.http.middlewares.companion-headers.headers.customrequestheaders.X-Forwarded-Proto=http" - "traefik.http.middlewares.companion-headers.headers.customrequestheaders.X-Forwarded-Host=${HOST}/companion" - "traefik.http.routers.companion.middlewares=companion-stripprefix,companion-headers,companion-cors" + profiles: + - archiver diff --git a/jobs.docker-compose.yml b/jobs.docker-compose.yml index ba46c44..2e3ac22 100644 --- a/jobs.docker-compose.yml +++ b/jobs.docker-compose.yml @@ -44,38 +44,11 @@ services: depends_on: - rabbitmq - redis - celery-flower: - image: ${CELERY_FLOWER_IMAGE_NAME}:${CELERY_IMAGE_TAG} - build: - context: ./jobsystem/ - dockerfile: ./flower.Dockerfile - container_name: celery-flower - expose: - - 5555:5555 - environment: - - CELERY_BROKER_URL=amqp://guest:guest@rabbitmq - - 
CELERY_RESULT_BACKEND=redis://redis:6379 - # Seems to be an issue with running it behind a reverse proxy, needs this and no stripprefix middleware - - FLOWER_URL_PREFIX=/celery-flower - - MINIO_REGION=${MINIO_REGION} - - MINIO_USER=${MINIO_USER} - - MINIO_PASS=${MINIO_PASS} - - MINIO_ARCHIVAL_BUCKET=${MINIO_ARCHIVAL_BUCKET} - - MINIO_RETRIEVAL_BUCKET=${MINIO_RETRIEVAL_BUCKET} - - MINIO_URL=minio:9000 - labels: - - "traefik.enable=true" - - "traefik.http.routers.celery-flower.rule=Host(`${HOST}`) && PathPrefix(`/celery-flower`)" - - "traefik.http.routers.celery-flower.entrypoints=web" - - "traefik.http.services.celery-flower.loadbalancer.server.port=5555" - depends_on: - - rabbitmq - - redis celery-worker: - image: ${CELERY_WORKER_IMAGE_NAME}:${CELERY_IMAGE_TAG} + image: ${CELERY_WORKER_IMAGE_NAME}:${OPENEM_IMAGE_TAG} container_name: celery-worker build: - context: ./jobsystem/ + context: ./backend/ dockerfile: ./worker.Dockerfile environment: - CELERY_BROKER_URL=amqp://guest:guest@rabbitmq @@ -89,38 +62,14 @@ services: - MINIO_ARCHIVAL_BUCKET=${MINIO_ARCHIVAL_BUCKET} - MINIO_RETRIEVAL_BUCKET=${MINIO_RETRIEVAL_BUCKET} - MINIO_URL=minio:9000 + - ARCHIVER_ENABLED=${ARCHIVER_ENABLED} + - INGESTER_ENABLED=${INGESTER_ENABLED} depends_on: - rabbitmq - redis volumes: - ${CELERY_ARCHIVING_DIR}:/tmp/archiving - celery-web: - image: ${CELERY_SERVER_IMAGE_NAME}:${CELERY_IMAGE_TAG} - container_name: celery-web - build: - context: ./jobsystem - dockerfile: ./web.Dockerfile - ports: - - 8004:8000 - environment: - - CELERY_BROKER_URL=amqp://guest:guest@rabbitmq - - CELERY_RESULT_BACKEND=redis://redis:6379 - - MINIO_REGION=${MINIO_REGION} - - MINIO_USER=${MINIO_USER} - - MINIO_PASS=${MINIO_PASS} - - MINIO_ARCHIVAL_BUCKET=${MINIO_ARCHIVAL_BUCKET} - - MINIO_RETRIEVAL_BUCKET=${MINIO_RETRIEVAL_BUCKET} - - MINIO_URL=minio:9000 - depends_on: - - rabbitmq - - redis - labels: - - "traefik.enable=true" - - "traefik.http.routers.celery-web.rule=Host(`${HOST}`) && PathPrefix(`/fastapi`)" - - "traefik.http.middlewares.celery-web-stripprefix.stripprefix.prefixes=/fastapi" - - "traefik.http.routers.celery-web.entrypoints=web" - - "traefik.http.routers.celery-web.middlewares=celery-web-stripprefix" - - "traefik.http.services.celery-web.loadbalancer.server.port=8000" + redis: image: redis:latest container_name: redis diff --git a/jobsystem/.vscode/launch.json b/jobsystem/.vscode/launch.json deleted file mode 100644 index 6a41d92..0000000 --- a/jobsystem/.vscode/launch.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. 
- // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "Python Debugger: FastAPI", - "type": "debugpy", - "request": "launch", - "module": "uvicorn", - "args": [ - "main:app", - "--reload" - ], - "jinja": true - } - ] -} \ No newline at end of file diff --git a/webuploader/src/routes/+page.svelte b/webuploader/src/routes/+page.svelte index b65985d..ab0675a 100644 --- a/webuploader/src/routes/+page.svelte +++ b/webuploader/src/routes/+page.svelte @@ -6,8 +6,7 @@ import { onMount } from 'svelte'; async function doPost(object_name: Str) { - var archive_url = `/fastapi/archiving`; - // var archive_url = 'http://localhost:8000/archiving'; + var archive_url = `/api/v1/archiving`; const res = await fetch(archive_url, { method: 'POST', body: JSON.stringify({ @@ -43,8 +42,7 @@ let selected: List = []; onMount(async function () { - var archivable_objects_url = '/fastapi/archivable_objects'; - // var archivable_objects_url = 'http://localhost:8000/archivable_objects'; + var archivable_objects_url = '/api/v1/archivable_objects'; try { const res = await fetch(archivable_objects_url, { method: 'GET' @@ -54,12 +52,17 @@ items = json; console.log(json); } catch {} - - // items = JSON.parse(json); - // console.log(items); }); let changeEvent: CustomEvent<{ changedIndices: number[] }> | null; + + function openTraefikDashboard() { + const host = window.location.hostname; + let url = window.location.protocol + '//traefik.' + host + '/dashboard/'; + console.log(url); + let w = window.open(url, '_blank'); + w?.focus(); + }
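The uploader's calls can also be reproduced outside the browser against the new Traefik route. A rough sketch, assuming the stack is running locally on the default `web` entrypoint; the host and file name are placeholders:

```python
import requests

# Traefik matches PathPrefix(`/api/v1`) and strips it before forwarding, while the
# FastAPI app is mounted with root_path=/api/v1, so public URLs keep the prefix.
BASE = "http://localhost/api/v1"  # placeholder host

# Same call the page makes in onMount():
print(requests.get(f"{BASE}/archivable_objects", timeout=10).json())

# Same call doPost() makes; the body must satisfy the ArchiveJob model (one `filename` field).
resp = requests.post(f"{BASE}/archiving/", json={"filename": "example-object"}, timeout=10)
print(resp.status_code)
```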

Open EM Network Data Uploader Service

@@ -70,7 +73,11 @@
@@ -82,10 +89,6 @@
[remaining Svelte template hunks: the HTML markup was stripped during extraction, so only the page title and the hunk headers above are recoverable]
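Both container CMDs now start Celery against `backend/jobsystem/celery.py` instead of the old `tasks` module, so a worker only registers the task modules that the feature flags let it import. A hedged sketch of queueing the ingester's `dummy_task` through that app; it assumes the code is run from `backend/` and that the broker/result URLs below (the compose defaults, placeholders outside Docker) point at a reachable RabbitMQ and Redis:

```python
import os

# Assumed connection settings: these mirror what the compose files inject into the
# containers and are placeholders when running outside Docker.
os.environ.setdefault("CELERY_BROKER_URL", "amqp://guest:guest@localhost")
os.environ.setdefault("CELERY_RESULT_BACKEND", "redis://localhost:6379")
os.environ.setdefault("INGESTER_ENABLED", "True")

# Imported for its side effect of creating and configuring the Celery app; the
# environment must be set before this import because celery.py reads it at import time.
from jobsystem.celery import celery_app  # noqa: E402
from ingester.tasks import dummy_task    # noqa: E402

# dummy_task is a shared_task, so it binds to celery_app (the current app) and
# .delay() publishes a message for a `celery -A jobsystem.celery worker` to pick up.
result = dummy_task.delay()
print(celery_app.main, result.id)
```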