diff --git a/.github/workflows/build-package-mock-ims.yml b/.github/workflows/build-package-mock-ims.yml
index 24fa783..5e1bcfd 100644
--- a/.github/workflows/build-package-mock-ims.yml
+++ b/.github/workflows/build-package-mock-ims.yml
@@ -1,4 +1,4 @@
-name: Build and Package the Mock IMS Service to Dev Registry
+name: Build and Package the NWSC Proxy Service to Dev Registry
 on:
   push:
     branches: [ main ]
@@ -11,7 +11,7 @@ jobs:
       fail-fast: true
       matrix:
         app:
-          - mockims
+          - proxy
 
     steps:
       - name: Login to GitHub Container Registry
@@ -20,7 +20,7 @@ jobs:
           registry: ghcr.io
           username: ${{github.actor}}
           password: ${{secrets.GITHUB_TOKEN}}
-
+
       - name: pull
         run: |
          docker pull ghcr.io/noaa-gsl/idss/commons/python/python-base:main
@@ -69,7 +69,7 @@ jobs:
             --build-arg COMMITBRANCH=${{env.BRANCH}} \
             --build-arg COMMITSHA=${{github.sha}} \
             -t ${{env.DEV_REGISTRY}}/${{env.APP_LOWERCASE}}:${{env.BRANCH}} \
-            -f ./docker/mockims/dev/Dockerfile .
+            -f ./docker/nwsc_proxy/dev/Dockerfile .
 
       - name: Run Trivy vulnerability scanner
         uses: aquasecurity/trivy-action@master
diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index b0dd658..47537f7 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -43,3 +43,10 @@ jobs:
 
       - name: Run code linter
         run: pylint ./python/idsse/testing --max-line-length=120 --recursive true
+
+      - name: Set PYTHONPATH for proxy service
+        run: |
+          echo "PYTHONPATH=python/nwsc_proxy" >> $GITHUB_ENV
+
+      - name: Run code linter for proxy service
+        run: pylint ./python/nwsc_proxy --max-line-length=120 --recursive true
diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
new file mode 100644
index 0000000..548f27d
--- /dev/null
+++ b/.github/workflows/run-tests.yml
@@ -0,0 +1,56 @@
+name: Run Pytest
+
+on:
+  pull_request:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -el {0}
+    strategy:
+      matrix:
+        python-version: [ "3.11" ]
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install python dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install pytest pytest-cov pylint==2.17.5 python-dateutil==2.8.2 flask==2.3.2
+
+      - name: Checkout idss-engine-commons
+        uses: actions/checkout@v2
+        with:
+          repository: NOAA-GSL/idss-engine-commons
+          ref: main
+          path: commons/
+
+      - name: Install IDSSE python commons
+        working-directory: commons/python/idsse_common
+        run: pip install .
+
+      - name: Set PYTHONPATH for pytest
+        run: |
+          echo "PYTHONPATH=python/nwsc_proxy" >> $GITHUB_ENV
+
+      - name: Test pytest
+        working-directory: python/nwsc_proxy/test
+        run: |
+          set -o pipefail;  # exit immediately if pytest fails (tee obfuscates the exit code)
+          pytest --cov=.. --cov-report=term --junitxml=./pytest.xml | tee ./coverage.txt;
+
+      - name: Pytest coverage comment
+        if: ${{ github.ref == 'refs/heads/main' }}
+        id: coverageComment
+        uses: MishaKav/pytest-coverage-comment@main
+        with:
+          hide-comment: true
+          badge-title: Coverage
+          title: Report
+          pytest-coverage-path: python/nwsc_proxy/test/coverage.txt
diff --git a/docker/mockims/local/Dockerfile b/docker/mockims/local/Dockerfile
deleted file mode 100644
index b9bcf07..0000000
--- a/docker/mockims/local/Dockerfile
+++ /dev/null
@@ -1,23 +0,0 @@
-# IMS Gateway Request Service using python sci base image
-# sci is required because the use of shapely
-FROM idss.engine.commons.python-base:local
-
-ARG maintainer
-LABEL maintainer ${maintainer}
-
-# Install additional dependencies
-#RUN conda config --add channels conda-forge && \
-#    conda install -y =<1.0.0>
-
-WORKDIR /python/ims_service
-
-# Copy source files
-COPY ./python/idsse/testing/ims_service/src/ims_service.py /python/ims_service/
-
-# (TEMPORARY) Copy canned criteria files. To be removed when integration with IMS API exists
-COPY ./python/idsse/testing/ims_service/profiles/*.json /python/profiles/
-
-# If you need to have a local mount, otherwise service will use version controlled jsons
-#VOLUME /python/profiles
-
-ENTRYPOINT [ "python3", "/python/ims_service/ims_service.py" ]
\ No newline at end of file
diff --git a/docker/mockims/dev/Dockerfile b/docker/nwsc_proxy/dev/Dockerfile
similarity index 59%
rename from docker/mockims/dev/Dockerfile
rename to docker/nwsc_proxy/dev/Dockerfile
index 396001c..db0276d 100644
--- a/docker/mockims/dev/Dockerfile
+++ b/docker/nwsc_proxy/dev/Dockerfile
@@ -1,4 +1,4 @@
-# IMS Gateway Request Service using python commons base image
+# NWSC Proxy Service using python commons base image
 FROM ghcr.io/noaa-gsl/idss/commons/python/python-base:main
 
 ARG maintainer
@@ -8,17 +8,18 @@ LABEL maintainer ${maintainer}
 #RUN conda config --add channels conda-forge && \
 #    conda install -y flask=2.3.2
 
-WORKDIR /python/ims_service
+WORKDIR /python/nwsc_proxy
 
 # Copy source files
-COPY ./python/idsse/testing/ims_service/src/ims_service.py /python/ims_service/
+COPY ./python/nwsc_proxy/*.py /python/nwsc_proxy/
+COPY ./python/nwsc_proxy/src/*.py /python/nwsc_proxy/src/
 
-# (TEMPORARY) Copy canned criteria files. To be removed when integration with IMS API exists
-COPY ./python/idsse/testing/ims_service/profiles/*.json /python/profiles/
+# (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists
+COPY ./python/nwsc_proxy/src/profiles/*.json /python/profiles/
 
 # The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria
 # then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment
 # for now.
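 # (A local-debugging sketch, not used by this image: bind-mounting a host directory of profile
 # JSONs, e.g. `docker run -v /some/host/profiles:/python/profiles ...` with a hypothetical host
 # path, would likewise shadow the canned JSONs COPY'd above, since the mount hides the image's
 # /python/profiles contents.)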
 #VOLUME /python/profiles
 
-ENTRYPOINT [ "python3", "/python/ims_service/ims_service.py" ]
+ENTRYPOINT [ "python3", "/python/nwsc_proxy/ncp_web_service.py", "--base_dir", "/python/profiles"]
diff --git a/docker/nwsc_proxy/local/Dockerfile b/docker/nwsc_proxy/local/Dockerfile
new file mode 100644
index 0000000..6acf977
--- /dev/null
+++ b/docker/nwsc_proxy/local/Dockerfile
@@ -0,0 +1,25 @@
+# NWSC Proxy Service using python commons base image
+FROM idss.engine.commons.python-base:local
+
+ARG maintainer
+LABEL maintainer ${maintainer}
+
+# Install additional dependencies
+#RUN conda config --add channels conda-forge && \
+#    conda install -y flask=2.3.2
+
+WORKDIR /python/nwsc_proxy
+
+# Copy source files
+COPY ./python/nwsc_proxy/*.py /python/nwsc_proxy/
+COPY ./python/nwsc_proxy/src/*.py /python/nwsc_proxy/src/
+
+# (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists
+COPY ./python/nwsc_proxy/src/profiles/*.json /python/profiles/
+
+# The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria
+# then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment
+# for now.
+#VOLUME /python/profiles
+
+ENTRYPOINT [ "python3", "/python/nwsc_proxy/ncp_web_service.py", "--base_dir", "/python/profiles"]
diff --git a/python/idsse/testing/ims_service/README.md b/python/idsse/testing/ims_service/README.md
deleted file mode 100644
index 9c638bf..0000000
--- a/python/idsse/testing/ims_service/README.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# IMS Dummy Service
-
- ## Overview
--The `ims-service` is dummy IMS service (Impact-based decision support service Management System)
-
-## Configurations
-The ims service should be started as a standalone service and offers two end-points in support of the IMS gateway request/response services. Those services should be provided with the network address of this services endpoints via their command line arguments for testing purposes.
-
-
-## Build, Release, and Run
-The subsections below outline how to build the images within this project. All microservices built with Docker are done within the
-[idss-engine/build](https://github.com/NOAA-GSL/idss-engine/build/) directory.
-
-**Recommended Tags**
-- development: `:dev`
-- stable release: `:release` ie. `:alder`
-- targeted environment: `:aws`
-
----
-### IMS Service
-From the IDSS Engine project root directory `idss-engine/build/<env>/<arch>/`:
-
-`$ docker-compose build ims_service`
-
-**Local Development Image Name** `idss.engine.service.ims.service:<tag>`
-
-**Packaged/Deployed Image Name** `idsse/service/ims/service:<tag>`
-
----
-
-### Run
-
-See the [Build, Release, Run](https://github.com/NOAA-GSL/idss-engine/blob/main/README.md#running-idss-engine) section within the umbrella project documentation: [idss-engine](https://github.com/NOAA-GSL/idss-engine)
-
-#### Docker
-
-To run this service can run in isolation, it does not requires a rabbitmq server
-
-```
-docker run --rm --name ims-service idss.engine.service.ims.service:local
-```
-
-Optional parameters include:
-```
-    None
-```
-#### Python (local)
-
-The most common way to get python dependencies installed is to use either [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html#installing-conda-on-a-system-that-has-other-python-installations-or-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/) package managers.
-
-1. Create and activate a virtualenv if you haven't already:
-    ```
-    python3 -m venv .venv && source .venv/bin/activate
-    ```
-2. Install 3rd party dependencies
-    1. Using pip:
-        ```
-        pip install flask
-        ```
-    1. Or using conda (much slower):
-        ```
-        conda install -c conda-forge python==3.11 pika flask
-        ```
-3. Import idsse-common library, which is not currently published to any public repository like pip and must be cloned from GitHub manually:
-    ```
-    pip install --editable /local/path/to/idss-engine-commons
-    ```
-    - This library uses [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) or [s5cmd](https://github.com/peak/s5cmd/blob/master/README.md#installation) filesystem tools to interact with AWS, so your machine must have one of these installed as well. Example installs using homebrew:
-    ```
-    brew install awscli
-    brew install peak/tap/s5cmd
-    ```
-
-Lastly, `cd` to the `python/ims_service/src` directory, and start the relevant service. For example, for Request Service:
-```sh
-python3 ims_service.py
-```
diff --git a/python/idsse/testing/ims_service/src/ims_service.py b/python/idsse/testing/ims_service/src/ims_service.py
deleted file mode 100644
index 217860f..0000000
--- a/python/idsse/testing/ims_service/src/ims_service.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""Test service for ims_gateway services"""
-# ----------------------------------------------------------------------------------
-# Created on Fri Apr 07 2023
-#
-# Copyright (c) 2023 Colorado State University. All rights reserved. (1)
-#
-# Contributors:
-#     Paul Hamer (1)
-#
-# ----------------------------------------------------------------------------------
-# pylint: disable=missing-function-docstring,redefined-outer-name,protected-access
-# pylint: disable=unused-argument, disable=duplicate-code
-import json
-
-from glob import glob
-from os import path
-
-from flask import Flask, request, jsonify
-
-app = Flask(__name__)
-app.config['GSL_KEY'] = '8209c979-e3de-402e-a1f5-556d650ab889'
-
-# The joined profiles from the JSON examples...
-ims_request = {'errors': [], 'profiles': []}
-
-
-@app.route('/all-events', methods=['GET'])
-def profiles():
-    # First check for the key argument and that it matches the expected value...
-    if request.headers.get("X-Api-Key") != app.config['GSL_KEY']:
-        return jsonify({"message": "ERROR: Unauthorized"}), 401
-
-    if len(request.args.keys()) != 1 or request.args.get('dataSource') != 'NBM':
-        # add one more check for ANY (currently IMS Gateway Request is using 'ANY')
-        if request.args.get('dataSource') != 'ANY':
-            return jsonify({"message": "Bad Request : Invalid argument!"}), 400
-
-    # Return the profiles...
-    return jsonify(ims_request)
-
-
-@app.route('/ims-response', methods=['POST'])
-def response():
-    # First check for the key argument and that it matches the expected value...
-    if request.headers.get("X-Api-Key") != app.config['GSL_KEY']:
-        return jsonify({"message": "ERROR: Unauthorized"}), 401
-
-    data = request.get_json()  # Assumes the incoming data is in JSON format
-    print("Received POST request with data:", data)
-
-    # Process the data or perform any desired actions
-    return jsonify({"message": "POST request received successfully!"})
-
-
-if __name__ == '__main__':
-    # Load the canned profiles from the resources directory into a single dictionary to form
-    # one JSON response when queried by the IMS_request service.
-    profile_dir = path.join(path.dirname(__file__), '..', 'profiles')
-    json_files = [
-        path.join(profile_dir, file)
-        for file in glob('*.json', root_dir=profile_dir)
-    ]
-
-    print('Loading canned support profiles from:', json_files)
-    # json_files = sorted(glob('../profiles/*.json'))
-    for json_file in json_files:
-        with open(json_file, 'r', encoding="utf-8") as jf:
-            profile = json.load(jf)
-            # print(profile)
-            for err in profile['errors']:
-                ims_request['errors'].append(err)
-            for pro in profile['profiles']:
-                ims_request['profiles'].append(pro)
-    # ims_request = ims_request | {os.path.basename(json_file).strip('.json') : profile}
-
-    # host=0.0.0.0 is required for flask to work properly in docker and k8s env
-    app.run(host='0.0.0.0', port=5000)
diff --git a/python/nwsc_proxy/README.md b/python/nwsc_proxy/README.md
new file mode 100644
index 0000000..297c256
--- /dev/null
+++ b/python/nwsc_proxy/README.md
@@ -0,0 +1,92 @@
+# NWS Connect Proxy Service
+
+## Overview
+The `nwsc-proxy` is a web service that simulates NWS Connect systems by storing a set of Support Profiles and serving them over a simple REST interface.
+
+## Configurations
+The NWS Connect proxy service should be started as a standalone service, and offers two endpoints in support of the NWS Connect Gateway services. For testing purposes, those services should be given the network address of this service's endpoints via their command line arguments.
+
+
+## Build, Release, and Run
+The subsections below outline how to build the images within this project. All microservices built with Docker are done within the
+[idss-engine/build](https://github.com/NOAA-GSL/idss-engine/build/) directory.
+
+**Recommended Tags**
+- development: `:dev`
+- stable release: `:release` ie. `:alder`
+- targeted environment: `:aws`
+
+---
+### NWSC Proxy Service
+From the IDSS Engine project root directory `idss-engine/build/<env>/<arch>/`:
+
+`$ docker-compose build proxy_service`
+
+**Local Development Image Name** `idss.engine.service.proxy.service:<tag>`
+
+**Packaged/Deployed Image Name** `idsse/service/proxy/service:<tag>`
+
+---
+
+### Run
+
+See the [Build, Release, Run](https://github.com/NOAA-GSL/idss-engine/blob/main/README.md#running-idss-engine) section within the umbrella project documentation: [idss-engine](https://github.com/NOAA-GSL/idss-engine)
+
+#### Docker
+
+This service can run in isolation; it does not require a RabbitMQ server:
+
+```
+docker run --rm --name proxy-service idss.engine.service.proxy.service:local
+```
+
+Required parameters include:
+```
+    --base_dir /path/to/file/dir    # file location where JSON files will be read and written
+```
+#### Python (local)
+
+The most common way to get python dependencies installed is to use either [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html#installing-conda-on-a-system-that-has-other-python-installations-or-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/) package managers.
+
+1. Create and activate a virtualenv if you haven't already:
+    ```
+    python3 -m venv .venv && source .venv/bin/activate
+    ```
+2. Install 3rd party dependencies
+    1. Using pip:
+        ```
+        pip install flask
+        ```
+    1. Or using conda (much slower):
+        ```
+        conda install -c conda-forge python==3.11 pika flask
+        ```
+3. Install the idsse-common library, which is not currently published to any public repository such as PyPI, so it must be cloned from GitHub and installed manually:
+    ```
+    pip install --editable /local/path/to/idss-engine-commons
+    ```
+    - This library uses [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) or [s5cmd](https://github.com/peak/s5cmd/blob/master/README.md#installation) filesystem tools to interact with AWS, so your machine must have one of these installed as well. Example installs using homebrew:
+    ```
+    brew install awscli
+    brew install peak/tap/s5cmd
+    ```
+
+Lastly, `cd` to the `./python/nwsc_proxy` directory, and start the NWS Connect Proxy service:
+```sh
+python3 ncp_web_service.py --base_dir /path/to/some/dir
+```
+
+On startup, the service creates 'existing' and 'new' subdirectories at the path location given by `--base_dir` if needed, then reads into its in-memory cache any existing JSON files in the base directory or either subdirectory.
+
+### Endpoints
+- GET `/health`
+- GET `/all-events?dataSource=ANY&status=existing`
+  - Get the list of existing Support Profiles (not new). The response will be formatted like `{ "profiles": [], "errors": [] }`
+- GET `/all-events?dataSource=ANY&status=new`
+  - Get only new (never before processed) Support Profiles. After a profile is returned to any API request, it will disappear from the "new" list, appearing only in `status=existing` filter requests.
+- POST `/all-events`
+  - Create a new Support Profile to be stored by the API. The body of the request is the JSON that will be saved; the `id` field should be unique.
+- DELETE `/all-events?uuid=<uuid>`
+  - Permanently remove an existing Support Profile from the API. `uuid` must match one of the saved Support Profile JSONs' `id` attribute, otherwise it will return `404`.
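+
+As a quick smoke test against a locally running instance (hypothetical host and default port; the key below is the `GSL_KEY` constant from `ncp_web_service.py`):
+```sh
+curl -H "X-Api-Key: 8209c979-e3de-402e-a1f5-556d650ab889" \
+  "http://localhost:5000/all-events?dataSource=ANY&status=new"
+```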
+
+Note that all requests to the `/all-events` endpoint require an `X-Api-Key` header that must match the approved key, or the API will return `401`.
\ No newline at end of file
diff --git a/python/idsse/testing/ims_service/__init__.py b/python/nwsc_proxy/__init__.py
similarity index 100%
rename from python/idsse/testing/ims_service/__init__.py
rename to python/nwsc_proxy/__init__.py
diff --git a/python/nwsc_proxy/ncp_web_service.py b/python/nwsc_proxy/ncp_web_service.py
new file mode 100644
index 0000000..bb337ef
--- /dev/null
+++ b/python/nwsc_proxy/ncp_web_service.py
@@ -0,0 +1,145 @@
+"""NWS Connect Proxy service simulating behaviors of NWS Connect core services"""
+# ----------------------------------------------------------------------------------
+# Created on Fri Apr 07 2023
+#
+# Copyright (c) 2023 Colorado State University. All rights reserved. (1)
+#
+# Contributors:
+#     Paul Hamer (1)
+#     Mackenzie Grimes (1)
+#
+# ----------------------------------------------------------------------------------
+import os
+from datetime import datetime, UTC
+from argparse import ArgumentParser, Namespace
+
+from flask import Flask, current_app, request, jsonify
+
+from src.profile_store import ProfileStore
+
+# constants
+GSL_KEY = '8209c979-e3de-402e-a1f5-556d650ab889'
+
+
+def to_iso(date_time: datetime) -> str:
+    """Format a datetime instance to an ISO string. Borrowed from idsse.commons.utils for now.
+
+    e.g. to_iso(datetime(2024, 1, 1, 12, 34, tzinfo=UTC)) -> '2024-01-01T12:34:00.000Z'
+    """
+    suffix = ('Z' if date_time.tzname() in [None, str(UTC)]
+              else date_time.strftime('%Z')[3:])
+    return (f'{date_time.strftime("%Y-%m-%dT%H:%M")}:'
+            f'{(date_time.second + date_time.microsecond / 1e6):06.3f}{suffix}')
+
+
+# pylint: disable=too-few-public-methods
+class HealthRoute:
+    """Handle requests to /health endpoint"""
+    def __init__(self):
+        self._app_start_time = datetime.now(UTC)
+
+    def handler(self):
+        """Logic for requests to /health"""
+        uptime = datetime.now(UTC) - self._app_start_time
+        return jsonify({
+            'startedAt': to_iso(self._app_start_time),
+            'uptime': uptime.total_seconds()
+        }), 200
+
+
+class EventsRoute:
+    """Handle requests to /all-events endpoint"""
+    def __init__(self, base_dir: str):
+        self.profile_store = ProfileStore(base_dir)
+
+    # pylint: disable=too-many-return-statements
+    def handler(self):
+        """Logic for requests to /all-events"""
+        # check that this request has proper key to get or add data
+        if request.headers.get('X-Api-Key') != current_app.config['GSL_KEY']:
+            return jsonify({'message': 'ERROR: Unauthorized'}), 401
+
+        if request.method == 'POST':
+            # request is saving new Support Profile event
+            request_body: dict = request.json
+            profile_id = self.profile_store.save(request_body)
+            if not profile_id:
+                return jsonify({'message': f'Profile {request_body.get("id")} already exists'}
+                               ), 400
+
+            return jsonify({'message': f'Profile {profile_id} saved'}), 201
+
+        if request.method == 'DELETE':
+            profile_id = request.args.get('uuid', default=None, type=str)
+            is_deleted = self.profile_store.delete(profile_id)
+            if not is_deleted:
+                return jsonify({'message': f'Profile {profile_id} not found'}), 404
+            return jsonify({'message': f'Profile {profile_id} deleted'}), 204
+
+        # otherwise, must be 'GET' operation
+        data_source = request.args.get('dataSource', None, type=str)
+        if data_source not in ['NBM', 'ANY']:
+            return jsonify({'profiles': [], 'errors': [f'Invalid dataSource: {data_source}']}), 400
+
+        profile_status = request.args.get('status', default='existing', type=str)
+        if profile_status == 'existing':
+            profiles = self.profile_store.get_all()
+
+        elif profile_status == 'new':
+            profiles = self.profile_store.get_all(filter_new_profiles=True)
+            # update ProfileStore to label all queried events as no longer "new";
+            # they've now been returned to IDSS Engine clients at least once
+            current_app.logger.info('Got all new profiles: %s', profiles)
+            for profile in profiles:
+                self.profile_store.mark_as_existing(profile['id'])
+
+        else:
+            # status query param should have been 'existing' or 'new'
+            return jsonify(
+                {'profiles': [], 'errors': [f'Invalid profile status: {profile_status}']}
+            ), 400
+
+        return jsonify({'profiles': profiles, 'errors': []}), 200
+
+
+class AppWrapper:
+    """Web server class wrapping Flask operations"""
+    def __init__(self, base_dir: str):
+        """Build Flask app instance, mapping handler to each endpoint"""
+        self.app = Flask(__name__, static_folder=None)  # no need for a static folder
+        self.app.config['GSL_KEY'] = GSL_KEY
+
+        health_route = HealthRoute()
+        events_route = EventsRoute(base_dir)
+
+        self.app.add_url_rule('/health', 'health', view_func=health_route.handler,
+                              methods=['GET'])
+        self.app.add_url_rule('/all-events', 'events',
+                              view_func=events_route.handler,
+                              methods=['GET', 'POST', 'DELETE'])
+
+    def run(self, **kwargs):
+        """Start up web server"""
+        self.app.run(**kwargs)
+
+
+def create_app(args: Namespace) -> Flask:
+    """Create a Flask instance"""
+    base_dir = args.base_dir
+    return AppWrapper(base_dir).app
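+
+
+# For reference, a hypothetical gunicorn invocation that would exercise the
+# SERVER_SOFTWARE branch at the bottom of this module:
+#   BASE_DIR=/path/to/profiles gunicorn -b 0.0.0.0:5000 ncp_web_service:app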
+if __name__ == '__main__':  # pragma: no cover
+    parser = ArgumentParser()
+    parser.add_argument('--port', dest='port', default=5000, type=int,
+                        help='The port the web server will listen on.')
+    parser.add_argument('--base_dir', dest='base_dir', required=True, type=str,
+                        help='The base directory where Support Profile JSONs will be read/written')
+
+    _args = parser.parse_args()
+
+    app = create_app(_args)
+    # host=0.0.0.0 is required for flask to work properly in docker and k8s env
+    app.run(host='0.0.0.0', port=_args.port)
+
+elif 'gunicorn' in os.getenv('SERVER_SOFTWARE', default=''):  # pragma: no cover
+    # default to current directory
+    _base_dir = os.getenv('BASE_DIR', os.getcwd())
+    app = AppWrapper(_base_dir).app
diff --git a/python/idsse/testing/ims_service/src/__init__.py b/python/nwsc_proxy/src/__init__.py
similarity index 100%
rename from python/idsse/testing/ims_service/src/__init__.py
rename to python/nwsc_proxy/src/__init__.py
diff --git a/python/nwsc_proxy/src/profile_store.py b/python/nwsc_proxy/src/profile_store.py
new file mode 100644
index 0000000..a0eb4fd
--- /dev/null
+++ b/python/nwsc_proxy/src/profile_store.py
@@ -0,0 +1,193 @@
+"""Profile store that does CRUD operations on filesystem to simulate NWS Connect storage"""
+# ----------------------------------------------------------------------------------
+# Created on Tues Dec 17 2024
+#
+# Copyright (c) 2024 Colorado State University. All rights reserved. (1)
+#
+# Contributors:
+#     Mackenzie Grimes (1)
+#
+# ----------------------------------------------------------------------------------
+import os
+import json
+import logging
+from dataclasses import dataclass
+from glob import glob
+
+# constants controlling the subdirectory where new vs. existing Profiles are saved
+NEW_SUBDIR = 'new'
+EXISTING_SUBDIR = 'existing'
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class CachedProfile:
+    """Data class to hold Support Profile's data and metadata ("new" vs "existing" status)
+
+    Args:
+        data (dict): full JSON data of this Support Profile
+        is_new (bool): True if this Support Profile has never been processed (never returned
+            in an API response). Ought to start as True
+    """
+    # pylint: disable=invalid-name
+    data: dict
+    is_new: bool
+
+    @property
+    def id(self) -> str:
+        """The Support Profile UUID"""
+        return self.data.get('id')
+
+
+class ProfileStore:
+    """Data storage using JSON files on filesystem that simulates CRUD operations"""
+    def __init__(self, base_dir: str):
+        self._base_dir = base_dir
+        self._new_dir = os.path.join(self._base_dir, NEW_SUBDIR)
+        self._existing_dir = os.path.join(self._base_dir, EXISTING_SUBDIR)
+
+        # ensure that base directory and all expected subdirectories exist
+        for _dir in [self._base_dir, self._new_dir, self._existing_dir]:
+            os.makedirs(_dir, exist_ok=True)
+
+        # load any NWS Connect response files dumped into the base_dir
+        for response_filename in glob('*.json', root_dir=self._base_dir):
+            response_filepath = os.path.join(self._base_dir, response_filename)
+            logger.warning('Loading profiles from raw API response file: %s', response_filepath)
+
+            with open(response_filepath, 'r', encoding='utf-8') as infile:
+                data: dict = json.load(infile)
+
+            # loop through all profiles in this file,
+            # save them to "existing" directory as individual profiles
+            for profile in data.get('profiles', []):
+                profile_filepath = os.path.join(self._existing_dir, f'{profile["id"]}.json')
+                logger.info('Saving existing profile to file: %s', profile_filepath)
+
+                with open(profile_filepath, 'w', encoding='utf-8') as outfile:
+                    json.dump(profile, outfile)
+
+        # populate cache of JSON data of all Support Profiles, marked as new vs. existing
+        existing_profiles = [CachedProfile(profile, is_new=False)
+                             for profile in self._load_profiles_from_filesystem(self._existing_dir)]
+        new_profiles = [CachedProfile(profile, is_new=True)
+                        for profile in self._load_profiles_from_filesystem(self._new_dir)]
+
+        self.profile_cache = existing_profiles + new_profiles
+
+    def get_all(self, filter_new_profiles=False) -> list[dict]:
+        """Get all Support Profile JSONs persisted in this API, filtered to status='new'
+        (Support Profiles never before returned in an API request) when filter_new_profiles
+        is True, or status='existing' otherwise.
+
+        Args:
+            filter_new_profiles (bool): if True, get only Support Profiles that have never been
+                returned to IDSS Engine on previous requests (never processed). Default is False:
+                return all existing profiles.
+        """
+        return [cached_profile.data for cached_profile in self.profile_cache
+                if cached_profile.is_new == filter_new_profiles]
+
+    def save(self, profile: dict) -> str | None:
+        """Persist a new Support Profile to this API
+
+        Returns:
+            str | None: UUID of saved Support Profile on success, otherwise None
+        """
+        logger.debug('Now saving new profile: %s', profile)
+
+        # if profile ID is already in the cache, reject this save
+        existing_profile = next((cached_obj for cached_obj in self.profile_cache
+                                 if cached_obj.id == profile.get('id')), None)
+        if existing_profile:
+            logger.warning('Cannot save profile; already exists %s', existing_profile.id)
+            return None
+
+        cached_profile = CachedProfile(profile, is_new=True)
+
+        # save Profile JSON to filesystem
+        filepath = os.path.join(self._new_dir, f'{cached_profile.id}.json')
+        logger.info('Now saving profile to path: %s', filepath)
+        with open(filepath, 'w', encoding='utf-8') as file:
+            json.dump(profile, file)
+
+        # add profile to in-memory cache
+        self.profile_cache.append(cached_profile)
+        return cached_profile.id
+
+    def mark_as_existing(self, profile_id: str) -> bool:
+        """Mark a formerly "new" Support Profile as "existing", i.e. it has been returned in an
+        API response at least once and should no longer be processed as "new"
+
+        Returns:
+            bool: True on success. False if JSON with this profile_id not found on filesystem
+        """
+        # find the profile data from the new_profiles cache and move it to existing_profiles
+        cached_profile = next((profile for profile in self.profile_cache
+                               if profile.id == profile_id), None)
+        if not cached_profile:
+            # profile is not in cache; it must not exist
+            logger.warning('Support Profile %s expected in profile_cache but not found',
+                           profile_id)
+            return False
+
+        new_filepath = os.path.join(self._new_dir, f'{profile_id}.json')
+        if not os.path.exists(new_filepath):
+            logger.warning('Attempt to mark as "existing" profile that is not found: %s',
+                           new_filepath)
+            return False
+
+        # move the JSON file from the "new" to the "existing" directory and update cache
+        existing_filepath = os.path.join(self._existing_dir, f'{profile_id}.json')
+        os.rename(new_filepath, existing_filepath)
+        cached_profile.is_new = False
+
+        return True
+
+    def delete(self, profile_id: str) -> bool:
+        """Delete a Support Profile from storage, based on its UUID.
+
+        Returns:
+            bool: True on success
+        """
+        logger.info('Deleting profile_id %s', profile_id)
+
+        filepath = os.path.join(self._existing_dir, f'{profile_id}.json')
+        if not os.path.exists(filepath):
+            # profile does not exist in the "existing" subdirectory; maybe it's in "new"
+            filepath = os.path.join(self._new_dir, f'{profile_id}.json')
+
+        if not os.path.exists(filepath):
+            logger.warning('Cannot delete profile %s; JSON file not found in %s or %s',
+                           profile_id, self._existing_dir, self._new_dir)
+            return False
+
+        logger.debug('Attempting to delete profile at path: %s', filepath)
+        os.remove(filepath)
+
+        # drop profile from cache
+        self.profile_cache = [cached_profile for cached_profile in self.profile_cache
+                              if cached_profile.id != profile_id]
+        return True
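+
+    # Usage sketch (hypothetical directory and ID, for illustration only):
+    #     store = ProfileStore('/tmp/profiles')
+    #     store.save({'id': 'abc'})        # writes new/abc.json; cached with is_new=True
+    #     store.mark_as_existing('abc')    # moves the JSON to existing/abc.json
+    #     store.delete('abc')              # removes the JSON file and drops the cache entry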
+
+    def _load_profiles_from_filesystem(self, dir_: str) -> list[dict]:
+        """Read all JSON files from one of this ProfileStore's subdirectories, and return a list
+        of the discovered files' JSON data.
+
+        Args:
+            dir_ (str): path to scan for Support Profile or NWS Connect API response JSON files
+        """
+        logger.info('Loading Support Profiles JSON files from path: %s', dir_)
+
+        profile_list: list[dict] = []
+        for filename in glob('*.json', root_dir=dir_):
+            with open(os.path.join(dir_, filename), 'r', encoding='utf-8') as file:
+                json_data: dict = json.load(file)
+            # if this is a pure NWS Connect response, profile data is nested inside `profiles`
+            if (profiles := json_data.get('profiles', None)) and isinstance(profiles, list):
+                profile_list.extend(profiles)
+            else:
+                # this file is assumed to be just a Support Profile
+                profile_list.append(json_data)
+
+        return profile_list
diff --git a/python/idsse/testing/ims_service/profiles/ims_test_1.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_1.json
similarity index 100%
rename from python/idsse/testing/ims_service/profiles/ims_test_1.json
rename to python/nwsc_proxy/src/profiles/nwsc_test_response_1.json
diff --git a/python/idsse/testing/ims_service/profiles/ims_test_2.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_2.json
similarity index 100%
rename from python/idsse/testing/ims_service/profiles/ims_test_2.json
rename to python/nwsc_proxy/src/profiles/nwsc_test_response_2.json
diff --git a/python/idsse/testing/ims_service/profiles/ims_test_3.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_3.json
similarity index 100%
rename from python/idsse/testing/ims_service/profiles/ims_test_3.json
rename to python/nwsc_proxy/src/profiles/nwsc_test_response_3.json
diff --git a/python/nwsc_proxy/test/.coveragerc b/python/nwsc_proxy/test/.coveragerc
new file mode 100644
index 0000000..02afcac
--- /dev/null
+++ b/python/nwsc_proxy/test/.coveragerc
@@ -0,0 +1,2 @@
+[run]
+omit = */test_*,__init__.py
diff --git a/python/nwsc_proxy/test/__init__.py b/python/nwsc_proxy/test/__init__.py
new file mode 100644
index 0000000..7072501
--- /dev/null
+++ b/python/nwsc_proxy/test/__init__.py
@@ -0,0 +1,4 @@
+"""Add nwsc_proxy top-level module to sys.path"""
+import os
+import sys
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
diff --git a/python/nwsc_proxy/test/test_ncp_web_service.py b/python/nwsc_proxy/test/test_ncp_web_service.py
new file mode 100644
index 0000000..1f7bbf7
--- /dev/null
+++ b/python/nwsc_proxy/test/test_ncp_web_service.py
@@ -0,0 +1,202 @@
+"""Unit tests for ncp_web_service.py"""
+# ----------------------------------------------------------------------------------
+# Created on Wed Dec 18 2024
+#
+# Copyright (c) 2024 Colorado State University. All rights reserved. (1)
+#
+# Contributors:
+#     Mackenzie Grimes (1)
+#
+# ----------------------------------------------------------------------------------
+# pylint: disable=missing-function-docstring,redefined-outer-name,unused-argument
+import json
+from datetime import timedelta
+from unittest.mock import Mock
+
+from flask import Request, Response
+from pytest import fixture, MonkeyPatch
+from werkzeug.datastructures import MultiDict
+
+from python.nwsc_proxy.ncp_web_service import (AppWrapper, Flask, Namespace, ProfileStore,
+                                               create_app, datetime, GSL_KEY)
+
+# constants
+EXAMPLE_DATETIME = datetime(2024, 1, 1, 12, 34)
+EXAMPLE_UUID = '9835b194-74de-4321-aa6b-d769972dc7cb'
+
+
+# fixtures
+@fixture
+def mock_datetime(monkeypatch: MonkeyPatch) -> Mock:
+    mock_obj = Mock(name='MockDatetime')
+    mock_obj.now.return_value = EXAMPLE_DATETIME
+    monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.datetime', mock_obj)
+
+    return mock_obj
+
+
+@fixture
+def mock_profile_store(monkeypatch: MonkeyPatch) -> Mock:
+    mock_obj = Mock(name='MockProfileStore', spec=ProfileStore)
+    monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.ProfileStore', mock_obj)
+    return mock_obj
+
+
+@fixture
+def mock_jsonify(monkeypatch: MonkeyPatch) -> Mock:
+    def mock_func(*args, **_kwargs):
+        return Response(bytes(json.dumps(args[0]), 'utf-8'), content_type='application/json')
+
+    mock_obj = Mock(name='MockJsonify')
+    mock_obj.side_effect = mock_func
+    monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.jsonify', mock_obj)
+    return mock_obj
+
+
+@fixture
+def mock_current_app(monkeypatch: MonkeyPatch) -> Mock:
+    mock_obj = Mock(name='MockCurrentApp', spec=Flask)
+    mock_obj.logger.info.return_value = None
+    mock_obj.logger.error.return_value = None
+    mock_obj.config = MultiDict({'GSL_KEY': GSL_KEY})
+    monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.current_app', mock_obj)
+    return mock_obj
+
+
+@fixture
+def mock_request(monkeypatch: MonkeyPatch, mock_current_app, mock_jsonify) -> Mock:
+    mock_obj = Mock(name='MockFlaskRequest', spec=Request)
+    mock_obj.origin = 'http://example.com:5000'
+    mock_obj.method = 'GET'
+    mock_obj.headers = MultiDict({'X-Api-Key': GSL_KEY})
+    monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.request', mock_obj)
+    return mock_obj
+
+
+@fixture
+def wrapper(mock_profile_store, mock_datetime, mock_request) -> AppWrapper:
+    return AppWrapper('/fake/base/dir')
+
+
+def test_create_app(mock_profile_store: Mock):
+    args = Namespace()
+    args.base_dir = '/fake/base/dir'
+
+    _app = create_app(args)
+
+    assert isinstance(_app, Flask)
+    endpoint_dict = _app.view_functions
+    assert sorted(list(endpoint_dict.keys())) == ['events', 'health']
+
+
+def test_health_route(wrapper: AppWrapper, mock_datetime: Mock):
+    # simulate that server has been running for 5 minutes
+    mock_datetime.now.return_value = EXAMPLE_DATETIME + timedelta(minutes=5)
+
+    result: tuple[Response, int] = wrapper.app.view_functions['health']()
+
+    response, status_code = result
+    assert status_code == 200
+    assert response.json == {
+        'startedAt': '2024-01-01T12:34:00.000Z',
+        'uptime': 5 * 60
+    }
+
+
+def test_events_bad_key(wrapper: AppWrapper, mock_request: Mock):
+    mock_request.headers = MultiDict({'X-Api-Key': 'A_BAD_KEY'})
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 401
+
+
+def test_get_bad_data_source(wrapper: AppWrapper, mock_request: Mock):
+    mock_request.args = MultiDict({'dataSource': 'A BAD DATA SOURCE'})
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 400
+
+
+def test_get_bad_status(wrapper: AppWrapper, mock_request: Mock):
+    mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'NOT REAL STATUS'})
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    response, status_code = result
+    assert status_code == 400
+    assert response.json == {'profiles': [], 'errors': ['Invalid profile status: NOT REAL STATUS']}
+
+
+def test_get_existing_profiles(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
+    mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'existing'})
+    example_profile_list = [{'id': EXAMPLE_UUID, 'name': 'My Profile'}]
+    mock_profile_store.return_value.get_all.return_value = example_profile_list
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    response, status_code = result
+    assert status_code == 200
+    assert response.json == {'profiles': example_profile_list, 'errors': []}
+    mock_profile_store.return_value.get_all.assert_called_with()  # filter_new_profiles not set
+
+
+def test_get_new_profiles(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
+    mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'new'})
+    example_profile = {'id': EXAMPLE_UUID, 'name': 'My Profile'}
+    mock_profile_store.return_value.get_all.return_value = [example_profile]
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    response, status_code = result
+    assert status_code == 200
+    assert response.json == {'profiles': [example_profile], 'errors': []}
+
+    get_call_args = mock_profile_store.return_value.get_all.mock_calls
+    assert get_call_args[0][2] == {'filter_new_profiles': True}  # filter_new_profiles set to True
+
+    # expect that we told ProfileStore to label this profile as not new
+    mark_existing_call_args = mock_profile_store.return_value.mark_as_existing.mock_calls
+    assert mark_existing_call_args[0][1][0] == example_profile['id']
+
+
+def test_create_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
+    mock_request.method = 'POST'
+    mock_request.json = {'id': EXAMPLE_UUID, 'name': 'My Profile'}
+    mock_profile_store.return_value.save.return_value = EXAMPLE_UUID  # save() success
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 201
+
+
+def test_create_previous_profile_failure(wrapper: AppWrapper,
+                                         mock_request: Mock,
+                                         mock_profile_store: Mock):
+    mock_request.method = 'POST'
+    mock_request.json = {'id': EXAMPLE_UUID, 'name': 'My Profile'}
+    mock_profile_store.return_value.save.return_value = None  # save() rejected, profile must exist
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 400
+
+
+def test_delete_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
+    mock_request.method = 'DELETE'
+    mock_request.args = MultiDict({'uuid': EXAMPLE_UUID})
+    mock_profile_store.return_value.delete.return_value = True  # delete worked
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 204
+
+
+def test_delete_profile_failure(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
+    mock_request.method = 'DELETE'
+    mock_request.args = MultiDict({'uuid': EXAMPLE_UUID})
+    mock_profile_store.return_value.delete.return_value = False  # delete() rejected; profile not found
+
+    result: tuple[Response, int] = wrapper.app.view_functions['events']()
+
+    assert result[1] == 404
diff --git a/python/nwsc_proxy/test/test_profile_store.py b/python/nwsc_proxy/test/test_profile_store.py
new file mode 100644
index 0000000..6cdce91
--- /dev/null
+++ b/python/nwsc_proxy/test/test_profile_store.py
@@ -0,0 +1,178 @@
+"""Tests for src/profile_store.py"""
+# ----------------------------------------------------------------------------------
+# Created on Wed Dec 18 2024
+#
+# Copyright (c) 2024 Colorado State University. All rights reserved. (1)
+#
+# Contributors:
+#     Mackenzie Grimes (1)
+#
+# ----------------------------------------------------------------------------------
+# pylint: disable=missing-function-docstring,redefined-outer-name
+import json
+import os
+import shutil
+from copy import deepcopy
+from glob import glob
+
+from pytest import fixture
+
+from python.nwsc_proxy.src.profile_store import ProfileStore, NEW_SUBDIR, EXISTING_SUBDIR
+
+# constants
+STORE_BASE_DIR = os.path.join(os.path.dirname(__file__), 'temp')
+RAW_JSON_PATH = os.path.join(os.path.dirname(__file__), '..', 'src', 'profiles')
+
+EXAMPLE_UUID = '9835b194-74de-4321-aa6b-d769972dc7cb'
+
+with open(os.path.join(RAW_JSON_PATH, 'nwsc_test_response_1.json'), 'r', encoding='utf-8') as f:
+    EXAMPLE_SUPPORT_PROFILE: dict = json.load(f)['profiles'][0]
+
+
+def _empty_directory(dir_path: str):
+    for filename in os.listdir(dir_path):
+        filepath = os.path.join(dir_path, filename)
+        if os.path.isdir(filepath):
+            if len(os.listdir(filepath)) > 0:
+                _empty_directory(filepath)  # recursively delete child directories
+            os.rmdir(filepath)
+        else:
+            os.remove(filepath)
+
+
+# fixtures
+def startup():
+    """Runs before each test is executed. Create test resource file structure"""
+    os.makedirs(STORE_BASE_DIR, exist_ok=True)
+    _empty_directory(STORE_BASE_DIR)  # delete any existing files/directories
+
+    # copy all JSON files from ../src/profiles/ to the ProfileStore's base dir
+    for response_file in glob('*.json', root_dir=RAW_JSON_PATH):
+        shutil.copy(os.path.join(RAW_JSON_PATH, response_file), STORE_BASE_DIR)
+
+
+def teardown():
+    """Clean up any files/directories created during test"""
+    _empty_directory(STORE_BASE_DIR)
+    os.rmdir(STORE_BASE_DIR)
+
+
+@fixture(autouse=True)
+def startup_and_teardown():
+    startup()
+    yield  # run test
+    teardown()
+
+
+@fixture
+def store():
+    return ProfileStore(STORE_BASE_DIR)
+
+
+# tests
+def test_profile_store_loads_api_responses(store: ProfileStore):
+    assert sorted([c.id for c in store.profile_cache]) == [
+        'a08370c6-ab87-4808-bd51-a8597e58410d',
+        'e1033860-f198-4c6a-a91b-beaec905132f',
+        'fd35adec-d2a0-49a9-a320-df20a7b6d681',
+    ]
+
+    for cache_obj in store.profile_cache:
+        # should have loaded all profiles as status "existing", file should exist in that subdir
+        assert not cache_obj.is_new
+        filepath = os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f'{cache_obj.id}.json')
+        assert os.path.exists(filepath)
+
+    # new directory should be empty to begin with
+    assert os.listdir(os.path.join(STORE_BASE_DIR, NEW_SUBDIR)) == []
+
+
+def test_store_loads_jsons_from_new(store: ProfileStore):
+    # create a pre-existing "new" profile as well as the 3 "existing" profiles
+    profile = deepcopy(store.get_all()[0])
+    profile['id'] = EXAMPLE_UUID  # give copied profile a unique identifier
+    store.save(profile)
+
+    # simulate starting ProfileStore process fresh, with existing JSONs on filesystem
+    _new_store = ProfileStore(STORE_BASE_DIR)
+
+    # newly created ProfileStore should have correctly loaded and labeled "new" Profile
+    new_profile_list = _new_store.get_all(filter_new_profiles=True)
+    assert len(new_profile_list) == 1
+    assert len(_new_store.profile_cache) == 4  # 3 existing, 1 new
+
+
+def test_get_all_profiles(store: ProfileStore):
+    result = store.get_all()
+    assert len(result) == 3
+
+    result = store.get_all(filter_new_profiles=True)
+    assert len(result) == 0
+
+
+def test_save_adds_to_new_profiles(store: ProfileStore):
+    new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE)
+    new_profile['id'] = EXAMPLE_UUID
+
+    new_profile_id = store.save(new_profile)
+
+    assert new_profile_id == EXAMPLE_UUID
+    # profile should now be returned by get() request for new profiles
+    new_profile_list = store.get_all(filter_new_profiles=True)
+    assert [p.get('id') for p in new_profile_list] == [EXAMPLE_UUID]
+
+    # profile should not be returned by get() request for existing profiles
+    existing_profile_list = store.get_all()
+    assert EXAMPLE_UUID not in [p.get('id') for p in existing_profile_list]
+
+    # file should exist in the "new" subdirectory
+    assert os.path.exists(os.path.join(STORE_BASE_DIR, NEW_SUBDIR, f'{new_profile_id}.json'))
+
+
+def test_save_rejects_existing_profile(store: ProfileStore):
+    new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE)  # use Support Profile that already exists
+
+    new_profile_id = store.save(new_profile)
+
+    assert not new_profile_id
+    # no new profile should have been added
+    new_profile_list = store.get_all(filter_new_profiles=True)
+    assert new_profile_list == []
+    # file should not exist in the "new" subdirectory
+    assert not os.path.exists(os.path.join(STORE_BASE_DIR, NEW_SUBDIR, f'{new_profile["id"]}.json'))
+
+
+def test_move_to_existing_success(store: ProfileStore):
+    new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE)
+    new_profile['id'] = EXAMPLE_UUID
+    store.save(new_profile)
+
+    new_profiles = store.get_all(filter_new_profiles=True)
+    assert [p['id'] for p in new_profiles] == [EXAMPLE_UUID]
+
+    store.mark_as_existing(EXAMPLE_UUID)
+
+    new_profiles = store.get_all(filter_new_profiles=True)
+    assert new_profiles == []  # Support Profile has vanished from list of new
+    existing_profiles = store.get_all()
+    assert EXAMPLE_UUID in [p['id'] for p in existing_profiles]  # now Profile is in existing list
+
+
+def test_delete_profile(store: ProfileStore):
+    existing_profile_list = store.get_all()
+    profile_id = existing_profile_list[0]['id']
+
+    success = store.delete(profile_id)
+
+    # after delete, profile should not be returned to get() request, and JSON file should be gone
+    assert success
+    existing_profile_list = store.get_all()
+    assert profile_id not in [p['id'] for p in existing_profile_list]
+    assert not os.path.exists(os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f'{profile_id}.json'))
+
+
+def test_delete_profile_failure(store: ProfileStore):
+    profile_id = '11111111-2222-3333-444444444444'  # fake ID does not exist in ProfileStore
+
+    success = store.delete(profile_id)
+    assert not success
diff --git a/python/setup.py b/python/setup.py
index ce642ad..c5b543c 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -29,7 +29,6 @@ def package_files(directory):
     'idsse.testing.idsse_common',
     'idsse.testing.ims_request',
     'idsse.testing.ims_response',
-    'idsse.testing.ims_service',
     'idsse.testing.nwsc_gateway',
     'idsse.testing.risk_processor',
     'idsse.testing.risk_processor.binghamton',
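
For local verification, the proxy tests can be run the same way the new `run-tests.yml` workflow runs them (dependency pins and `PYTHONPATH` taken from the workflow steps above; idss-engine-commons is assumed to already be pip-installed):
```sh
pip install pytest pytest-cov python-dateutil==2.8.2 flask==2.3.2
export PYTHONPATH=python/nwsc_proxy
cd python/nwsc_proxy/test
pytest --cov=.. --cov-report=term
```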