Add GitHub Actions files
vietnguyengit committed Dec 10, 2024
1 parent 056aae6 commit 20b81ee
Showing 7 changed files with 199 additions and 10 deletions.
2 changes: 2 additions & 0 deletions .github/environment/edge.env
@@ -0,0 +1,2 @@
AWS_REGION=ap-southeast-2
AWS_ROLE_ARN=arn:aws:iam::704910415367:role/AodnGitHubActionsRole
54 changes: 54 additions & 0 deletions .github/workflows/build_deploy_edge.yml
@@ -0,0 +1,54 @@
name: Build/Deploy Edge
on:
push:
branches:
- main
paths-ignore:
- '**/*.md'
- '.github/environment/**'
permissions:
id-token: write
contents: read
jobs:
build_push:
runs-on: ubuntu-latest
environment: central
outputs:
digest: ${{ steps.build_and_push.outputs.digest }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure AWS Credentials
id: aws_auth
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: ${{ vars.AWS_REGION }}
role-to-assume: ${{ vars.AWS_ROLE_ARN }}
- name: Login to ECR
uses: docker/login-action@v3
with:
registry: ${{ vars.ECR_REGISTRY }}
- name: Build and Push Docker Image
id: build_and_push
uses: docker/build-push-action@v5
with:
context: .
# Only building for AMD64 for now
# platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ vars.ECR_REGISTRY }}/${{ vars.ECR_REPOSITORY }}:${{ github.sha }}
${{ vars.ECR_REGISTRY }}/${{ vars.ECR_REPOSITORY }}:latest
trigger_edge_deploy:
needs: [build_push]
uses: ./.github/workflows/trigger_deploy.yml
with:
app_name: data-access-service
environment: edge
digest: ${{ needs.build_push.outputs.digest }}
secrets: inherit
38 changes: 38 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,38 @@
name: CI
on:
pull_request:
branches:
- main
push:
branches:
- main
paths-ignore:
- '**/*.md'
- '.github/environment/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Set up Poetry
uses: snok/install-poetry@v1
with:
version: "latest"
- name: Install dependencies
run: |
poetry lock --no-update
poetry install
- name: Run pre-commit checks
run: |
poetry run pre-commit run --all-files
- name: Run tests
run: |
poetry run pytest
80 changes: 80 additions & 0 deletions .github/workflows/trigger_deploy.yml
@@ -0,0 +1,80 @@
name: Trigger Deploy
on:
workflow_dispatch:
inputs:
app_name:
required: true
        description: The short name of the app, corresponding to its folder in appdeploy.
type: string
environment:
required: true
description: The environment to use for the deploy job.
type: choice
options:
- edge
- staging
- production
digest:
required: false
description: The image digest to pass to the deploy job.
type: string
workflow_call:
inputs:
app_name:
required: true
type: string
digest:
required: false
type: string
environment:
required: true
type: string
permissions:
id-token: write
contents: read
jobs:
trigger_deploy:
runs-on: ubuntu-latest
environment: ${{ inputs.environment }}
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: ${{ vars.AWS_REGION }}
role-to-assume: ${{ vars.AWS_ROLE_ARN }}
- name: Push Image Digest to SSM
if: ${{ inputs.digest != '' }}
run: |
aws ssm put-parameter \
--name "/apps/${{ inputs.app_name }}/${{ inputs.environment }}/image_digest" \
--type "String" \
--value "$digest" \
--overwrite
env:
digest: ${{ inputs.digest }}
- name: Generate App Token
uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ vars.DEPLOY_APP_ID }}
private-key: ${{ secrets.DEPLOY_APP_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
repositories: "appdeploy"
- name: Trigger Deploy Workflow
uses: actions/github-script@v7
with:
github-token: ${{ steps.app-token.outputs.token }}
retries: 3
retry-exempt-status-codes: 204
script: |
github.rest.actions.createWorkflowDispatch({
owner: 'aodn',
repo: 'appdeploy',
workflow_id: 'deploy.yml',
ref: 'main',
inputs: {
app_name: '${{ inputs.app_name }}',
environment: '${{ inputs.environment }}'
}
})
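For illustration, the github-script step above amounts to the following REST call against the GitHub API. This is a sketch: the token is a placeholder for the app token generated in the previous step, and the example inputs mirror the edge deploy.

```python
import requests

# Placeholder: in the workflow this comes from actions/create-github-app-token.
GITHUB_TOKEN = "<app-token>"

# Dispatch the deploy.yml workflow in aodn/appdeploy on the main branch.
resp = requests.post(
    "https://api.github.com/repos/aodn/appdeploy/actions/workflows/deploy.yml/dispatches",
    headers={
        "Authorization": f"Bearer {GITHUB_TOKEN}",
        "Accept": "application/vnd.github+json",
    },
    json={
        "ref": "main",
        "inputs": {"app_name": "data-access-service", "environment": "edge"},
    },
)
resp.raise_for_status()  # a 204 No Content response indicates success
```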
4 changes: 2 additions & 2 deletions README.md
@@ -108,6 +108,6 @@ PROFILE=edge
| Description | Endpoints | Param | Environment |
|--------------------|----------------------------------------|------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------|
| Formatted metadata | /api/v1/das/metadata/{uuid}             | none                                                                                             | ALL                                                                             |
| Raw metadata       | /api/v1/das/metadata/{uuid}/raw         | none                                                                                             | ALL                                                                             |
| Subsetting | /api/v1/das/data/{uuid} | start_date=2023-12-25T14:30:00 end_date=2024-02-25T14:30:00 start_depth=-0.06 f=netcdf or json | ALL |
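For a quick smoke test, a minimal sketch of calling the subsetting endpoint from Python is shown below. The base URL and UUID are placeholders; the real values depend on where the service is deployed.

```python
import requests

# Hypothetical base URL and dataset UUID -- adjust for your deployment.
BASE_URL = "http://localhost:5000/api/v1/das"
UUID = "<dataset-uuid>"

# Request a JSON subset for a date/depth window (params per the table above).
response = requests.get(
    f"{BASE_URL}/data/{UUID}",
    params={
        "start_date": "2023-12-25T14:30:00",
        "end_date": "2024-02-25T14:30:00",
        "start_depth": -0.06,
        "f": "json",
    },
)
response.raise_for_status()
records = response.json()
```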
3 changes: 2 additions & 1 deletion data_access_service/core/api.py
@@ -94,7 +94,8 @@ def has_data(self, uuid: str, start_date: datetime, end_date: datetime):
start_date, period_end, None, None, None, None, None
).empty:
return True
        else:
            start_date = period_end + timedelta(days=1)
return False
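For context, a hedged reconstruction of how the corrected branch sits inside has_data: the requested range is scanned in fixed-length chunks until any chunk returns data. The 30-day chunk length and the exact get_dataset_data argument list are assumptions inferred from the surrounding diff, not the verbatim implementation.

```python
from datetime import datetime, timedelta

def has_data(self, uuid: str, start_date: datetime, end_date: datetime) -> bool:
    # Scan the range chunk by chunk so an early hit avoids querying the rest.
    while start_date <= end_date:
        # Chunk length is an assumption; the real period size lives in the source.
        period_end = min(start_date + timedelta(days=30), end_date)
        if not self.get_dataset_data(
            uuid, start_date, period_end, None, None, None, None, None
        ).empty:
            return True
        else:
            start_date = period_end + timedelta(days=1)
    return False
```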

def get_dataset_data(
28 changes: 21 additions & 7 deletions data_access_service/core/restapi.py
@@ -54,7 +54,8 @@ def _generate_json_array(dask, compress: bool = False):
else:
return json_array


def _generate_partial_json_array(dask, compress: bool = False):
record_list = []
for partition in dask.to_delayed():
partition_df = convert_non_numeric_to_str(partition.compute())
@@ -78,12 +79,14 @@ def _generate_partial_json_array(dask, compress:bool = False):
else:
return json_array


# Currently we only need the year, month, and day.
def _reformat_date(date):
parsed_date = parser.isoparse(date)
formatted_date = parsed_date.strftime("%Y-%m-%d")
return formatted_date


def _round_5_decimal(value: float) -> float:
    # These values are only used for the frontend map display, so high precision is unnecessary.
return round(value, 5)
@@ -120,28 +123,35 @@ def _verify_depth_param(name: str, req_value: numpy.double) -> numpy.double | None:
else:
return req_value


def _verify_to_index_flag_param(flag: str) -> bool:
if (flag is not None) and (flag.lower() == "true"):
return True
else:
return False


def _response_json(filtered: DataFrame, compress: bool):
ddf: dask.dataframe.DataFrame = dd.from_pandas(
filtered, npartitions=len(filtered.index) // RECORD_PER_PARTITION + 1
)
    response = Response(
        _generate_json_array(ddf, compress), mimetype="application/json"
    )

if compress:
response.headers["Content-Encoding"] = "gzip"

return response


def _response_partial_json(filtered: DataFrame, compress: bool):
ddf: dask.dataframe.DataFrame = dd.from_pandas(
filtered, npartitions=len(filtered.index) // RECORD_PER_PARTITION + 1
)
    response = Response(
        _generate_partial_json_array(ddf, compress), mimetype="application/json"
    )

if compress:
response.headers["Content-Encoding"] = "gzip"
@@ -191,17 +201,19 @@ def get_mapped_metadata(uuid):
def get_raw_metadata(uuid):
return app.api.get_raw_meta_data(uuid)


@restapi.route("data/<string:uuid>/has_data", methods=["GET"])
def data_check(uuid):
    start_date = _verify_datatime_param(
"start_date", request.args.get("start_date", default=None, type=str)
)
    end_date = _verify_datatime_param(
"end_date", request.args.get("end_date", default=None, type=str)
)
    has_data = str(app.api.has_data(uuid, start_date, end_date)).lower()
return Response(has_data, mimetype="application/json")


@restapi.route("/data/<string:uuid>", methods=["GET"])
def get_data(uuid):
log.info("Request details: %s", json.dumps(request.args.to_dict(), indent=2))
@@ -223,7 +235,9 @@ def get_data(uuid):
"end_depth", request.args.get("end_depth", default=None, type=numpy.double)
)

    is_to_index = _verify_to_index_flag_param(
        request.args.get("is_to_index", default=None, type=str)
    )

    # The cloud-optimized format is fast to look up when an index exists; filtering on
    # fields outside the index gains nothing. Indexed fields: site_code, timestamp, polygon.
