From 34f856060a07ab09026f96cbc8f7fa9256ddac90 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Tue, 17 Dec 2024 17:33:29 -0700 Subject: [PATCH 01/24] add proxy_web_service, event_store to ims_service --- python/idsse/testing/ims_service/README.md | 18 +- .../testing/ims_service/proxy_web_service.py | 120 +++++++++++ .../testing/ims_service/src/event_store.py | 190 ++++++++++++++++++ .../testing/ims_service/test/__init__.py | 0 .../ims_service/test/test_event_store.py | 2 + 5 files changed, 321 insertions(+), 9 deletions(-) create mode 100644 python/idsse/testing/ims_service/proxy_web_service.py create mode 100644 python/idsse/testing/ims_service/src/event_store.py create mode 100644 python/idsse/testing/ims_service/test/__init__.py create mode 100644 python/idsse/testing/ims_service/test/test_event_store.py diff --git a/python/idsse/testing/ims_service/README.md b/python/idsse/testing/ims_service/README.md index 9c638bf..ed97268 100644 --- a/python/idsse/testing/ims_service/README.md +++ b/python/idsse/testing/ims_service/README.md @@ -1,10 +1,10 @@ -# IMS Dummy Service +# NWS Connect Proxy Service ## Overview --The `ims-service` is dummy IMS service (Impact-based decision support service Management System) +-The `proxy-service` is dummy NWS Connect service that simulates storing a set of Support Profiles and serving them up in a simple REST interface. ## Configurations -The ims service should be started as a standalone service and offers two end-points in support of the IMS gateway request/response services. Those services should be provided with the network address of this services endpoints via their command line arguments for testing purposes. +The NWS Connect proxy service should be started as a standalone service and offers two end-points in support of the NWSConnect Gateway request/response services. 
Those services should be provided with the network address of this services endpoints via their command line arguments for testing purposes. ## Build, Release, and Run @@ -20,11 +20,11 @@ The subsections below outline how to build the images within this project. All m ### IMS Service From the IDSS Engine project root directory `idss-engine/build///`: -`$ docker-compose build ims_service` +`$ docker-compose build proxy_service` -**Local Development Image Name** `idss.engine.service.ims.service:` +**Local Development Image Name** `idss.engine.service.proxy.service:` -**Packaged/Deployed Image Name** `idsse/service/ims/service:` +**Packaged/Deployed Image Name** `idsse/service/proxy/service:` --- @@ -37,7 +37,7 @@ See the [Build, Release, Run](https://github.com/NOAA-GSL/idss-engine/blob/main/ To run this service can run in isolation, it does not requires a rabbitmq server ``` -docker run --rm --name ims-service idss.engine.service.ims.service:local +docker run --rm --name proxy-service idss.engine.service.proxy.service:local ``` Optional parameters include: @@ -71,7 +71,7 @@ The most common way to get python dependencies installed is to use either [conda brew install peak/tap/s5cmd ``` -Lastly, `cd` to the `python/ims_service/src` directory, and start the relevant service. For example, for Request Service: +Lastly, `cd` to the `./python` directory, and start the relevant service. For example, for Request Service: ```sh -python3 ims_service.py +python3 proxy_service.py ``` diff --git a/python/idsse/testing/ims_service/proxy_web_service.py b/python/idsse/testing/ims_service/proxy_web_service.py new file mode 100644 index 0000000..8841fe9 --- /dev/null +++ b/python/idsse/testing/ims_service/proxy_web_service.py @@ -0,0 +1,120 @@ +"""Proxy web service simulating behaviors of NWS Connect core services""" +# ---------------------------------------------------------------------------------- +# Created on Fri Apr 07 2023 +# +# Copyright (c) 2023 Colorado State University. 
All rights reserved. (1) +# +# Contributors: +# Paul Hamer (1) +# Mackenzie Grimes (1) +# +# ---------------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods +from datetime import datetime, UTC +from argparse import ArgumentParser, Namespace + +from flask import Flask, request, jsonify, current_app + +from src.event_store import EventStore + +# constants +GSL_KEY = '8209c979-e3de-402e-a1f5-556d650ab889' + + +def to_iso(date_time: datetime) -> str: + """Format a datetime instance to an ISO string. Borrowed from idsse.commons.utils for now""" + return (f'{date_time.strftime("%Y-%m-%dT%H:%M")}:' + f'{(date_time.second + date_time.microsecond / 1e6):06.3f}' + 'Z' if date_time.tzname() in [None, str(UTC)] + else date_time.strftime("%Z")[3:]) + + +class HealthRoute: + """Handle requests to /health endpoint""" + def __init__(self): + self._app_start_time = datetime.now(UTC) + + def handler(self): + """Logic for requests to /health""" + uptime = datetime.now(UTC) - self._app_start_time + return jsonify({ + 'startedAt': to_iso(self._app_start_time), + 'uptime': uptime.total_seconds + }), 200 + + +class EventsRoute: + """Handle requests to /events endpoint""" + def __init__(self, base_dir: str): + self.event_store = EventStore(base_dir) + + def handler(self): + """Logic for requests to /events""" + # check that this request has proper key to get or add data + if request.headers.get('X-Api-Key') != GSL_KEY: + return jsonify({'message': 'ERROR: Unauthorized'}), 401 + + if request.method == 'POST': + # request is saving new Support Profile event + request_body: dict = request.json + event_id = self.event_store.save(request_body) # TODO: handle failure? + + return jsonify({'message': f'Event {event_id} saved'}), 201 + + if request.method == 'DELETE': + event_id = request.args.get('uuid', default=None, type=str) + self.event_store.delete(event_id) # TODO: handle failure? 
+ + return jsonify({'message': f'Event {event_id} deleted'}), 204 + + # otherwise, must be 'GET' operation + event_status = request.args.get('status', default='existing', type=str) + if event_status == 'existing': + events = self.event_store.get_all() + return jsonify({'events': events}), 200 + + if event_status == 'new': + new_events = self.event_store.get_all(filter_new_profiles=True) + # update EventStore to label all queried events as no longer "new"; + # they've now been returned to IDSS Engine clients at least once + for event in new_events: + self.event_store.move_to_existing(event['id']) + return jsonify({'events': events}), 200 + + return jsonify({'message': f'Invalid event status: {event_status}'}), 400 + + +class AppWrapper: + """Web server class wrapping Flask operations""" + def __init__(self, base_dir: str): + """Build Flask app instance, mapping handler to each endpoint""" + self.app = Flask(__name__) + + health_route = HealthRoute() + events_route = EventsRoute(base_dir) + + self.app.add_url_rule('/health', 'health', view_func=health_route.handler, + methods=['GET']) + self.app.add_url_rule('/events', 'events', + view_func=events_route.handler, + methods=['GET', 'POST', 'DELETE']) + + def run(self, **kwargs): + """Start up web server""" + self.app.run(**kwargs) + + +def create_app(args: Namespace = None) -> Flask: + base_dir = args.base_dir + _wrapper = AppWrapper(base_dir) + return _wrapper.app + + +if __name__ == '__main__': + # TODO: command line args + + # host=0.0.0.0 is required for flask to work properly in docker and k8s env + app = create_app() + app.run(host='0.0.0.0', port=5000) + +# TODO: gunicorn runtime diff --git a/python/idsse/testing/ims_service/src/event_store.py b/python/idsse/testing/ims_service/src/event_store.py new file mode 100644 index 0000000..7502674 --- /dev/null +++ b/python/idsse/testing/ims_service/src/event_store.py @@ -0,0 +1,190 @@ +"""Event store that does CRUD operations on filesystem to simulate NWS Connect 
storage""" +# ---------------------------------------------------------------------------------- +# Created on Tues Dec 17 2024 +# +# Copyright (c) 2024 Colorado State University. All rights reserved. (1) +# +# Contributors: +# Mackenzie Grimes (1) +# +# ---------------------------------------------------------------------------------- +import os +import json +import logging +from glob import glob + +# constants controlling the subdirectory where new vs. existing Events are saved +NEW_SUBDIR = 'new' +EXISTING_SUBDIR = 'existing' + +logger = logging.getLogger(__name__) + + +class EventStore: + """Data storage using JSON files on filesystem that simulates CRUD operations""" + def __init__(self, base_dir: str): + self._base_dir = base_dir + self._new_dir = os.path.join(self._base_dir, NEW_SUBDIR) + self._existing_dir = os.path.join(self._base_dir, EXISTING_SUBDIR) + + # ensure that base directory and all expected subdirectories exist + for _dir in [self._base_dir, self._new_dir, self._existing_dir]: + if not os.path.exists(_dir): + os.mkdir(_dir) + + # cache of JSON data of all Support Profiles, divided into new vs. existing Profiles + self.new_profiles = [] + self.existing_profiles = [] + + logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) + self.existing_profiles = self._read_existing_events() + + def get_all(self, filter_new_profiles = False) -> list[dict]: + """Get all Support Profile JSONs persisted in this API, filtering by status='new' + (if Support Profile has never been returned in an API request before) or status='existing' + otherwise. + + Args: + filter_new_profiles (bool): if True, get only Support Profiles that have never been + returned to IDSS Engine on previous requests (never processed). Default is False + (return all existing profiles). 
+ """ + if filter_new_profiles: + return self.new_profiles + return self.existing_profiles + + def save(self, event: dict) -> str | None: + """Persist a new Support Profile Event to this API + + Returns: + str | None: UUID of saved Support Profile on success, otherwise None + """ + logger.debug('Now saving new profile: %s', event) + # save to JSON file and add to in-memory cache + return self._save_event(event, is_new=True) + + def move_to_existing(self, event_id: str) -> bool: + """Mark an existing Support Profile Event as being "read", a.k.a. has been returned + in API response at least once and is no longer "new". + + Returns: + bool: True on success. False if JSON with this event_id (UUID) not found on filesystem + """ + new_filepath = os.path.join(self._new_dir, event_id, '.json') + + if not os.path.exists(new_filepath): + logger.warning('Attempt to mark as "existing" profile that is not found: %s', + new_filepath) + return False + + # move the JSON file from the "new" to the "existing" directory and update cache + existing_filepath = os.path.join(self._existing_dir, event_id, '.json') + os.rename(new_filepath, existing_filepath) + + # find the event data from the new_profiles cache and move it to the existing_profiles + event_data = next([profile for profile in self.new_profiles if profile['id'] == event_id], + None) + if not event_data: + logger.warning('Support Profile %s expected in new_profiles cache but not found', + event_id) + + # unexpectedly, profile is not in new_profiles cache; + # recover from this by re-reading the JSON from file, because file did exist + event_data = self._read_existing_event(event_id) + + # add Event/Profile to existing_profiles cache, and scrub from new_profiles cache + self.existing_profiles.append(event_data) + self.new_profiles = [profile for profile in self.existing_profiles + if profile['id'] != event_id] + + return True + + def delete(self, event_id: str) -> bool: + """Delete a Support Profile event from storage, based 
on its UUID. + + Returns: + bool: True on success + """ + logger.debug('Attempting to delete event_id %s', event_id) + is_deleted = self._delete_event(event_id) + if not is_deleted: + return False + + # drop profile from new or existing cache (could be in either) + self.new_profiles = [profile for profile in self.new_profiles if profile['id'] != event_id] + self.existing_profiles = [profile for profile in self.existing_profiles + if profile['id'] != event_id] + + return True + + # private methods that do the actual disk read/write operations + def _save_event(self, event: dict, is_new: bool) -> str: + """Writes event JSON to disk and adds the JSON to appropriate in-memory cache: + `self.new_profiles` if `is_new`, otherwise `self.existing_profiles`. + + Args: + event (dict): full JSON data to be saved + is_new (bool): if True, event will be saved in subdirectory and in-memory cache for + "new" profiles. Otherwise will be stored in "existing" profiles subdir/cache. + Returns: + str: the event_id, if save was successful + """ + event_id = event.get('id') + + # determine the right filepath where JSON data will be written + dir_path = self._new_dir if is_new else self._existing_dir + filepath = os.path.join(dir_path, event_id, '.json') + + # will be saved to the appropriate in-memory list of profile data + profile_cache = self.new_profiles if is_new else self.existing_profiles + + logger.info('Now saving event to path: %s', filepath) + with open(filepath, 'w', encoding='utf-8') as file: + os.write(file, event) # save JSON to filesystem + + profile_cache.append(event) + return event_id + + def _read_existing_events(self) -> list[dict]: + """Read all existing event JSON files from this EventStore's existing event subdirectory""" + event_list: list[dict] = [] + for filename in glob('*.json', root_dir=self._existing_dir): + with open(os.path.join(self._existing_dir, filename), 'r', encoding='utf-8') as file: + json_data = json.load(file) + event_list.append(json_data) + + 
return event_list + + def _read_existing_event(self, event_id: str) -> dict: + """Get an existing event from disk based on the event ID. + Reads from NEW subdirectory should almost never be necessary. + """ + filename = os.path.join(self._existing_dir, event_id, '.json') + logger.debug('Attempting to read existing event from path: %s', filename) + with open(filename, 'r', encoding='utf-8') as file: + return json.load(file) + + def _write_new_event(self, data: dict): + """Write a new Support Profile Event to disk using `self._new_dir`""" + event_id = data['id'] + with open(os.path.join(self._new_dir, event_id, '.json'), 'w', encoding='utf-8') as file: + os.write(file, data) + + def _delete_event(self, event_id: str) -> bool: + """Delete event from disk by event_id. + Returns: + bool: True on success, False if JSON file not found + """ + filepath = os.path.join(self._existing_dir, event_id, '.json') + if not os.path.exists(filepath): + # event does not in exist in existing subdirectory, maybe its in the new one + filepath = os.path.join(self._existing_dir, event_id, '.json') + + if not os.path.exists(filepath): + logger.warning('Cannot delete event %s; JSON file not found in %s or %s', + event_id, self._existing_dir, self._new_dir) + return False + + logger.debug('Attempting to delete event at path: %s', filepath) + os.remove(filepath) + return True diff --git a/python/idsse/testing/ims_service/test/__init__.py b/python/idsse/testing/ims_service/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/idsse/testing/ims_service/test/test_event_store.py b/python/idsse/testing/ims_service/test/test_event_store.py new file mode 100644 index 0000000..6c8fe9b --- /dev/null +++ b/python/idsse/testing/ims_service/test/test_event_store.py @@ -0,0 +1,2 @@ +"""Tests for src/event_store.py""" +pass From 2d6e4ff11464426f1f7504b4de65cfb51b7cc4ad Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Tue, 17 Dec 2024 17:58:24 -0700 Subject: 
[PATCH 02/24] bug fixes with filenaming, writing json, health endpoint --- .../testing/ims_service/proxy_web_service.py | 21 +++++++++++++------ .../testing/ims_service/src/event_store.py | 19 +++++++++-------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/python/idsse/testing/ims_service/proxy_web_service.py b/python/idsse/testing/ims_service/proxy_web_service.py index 8841fe9..fe15067 100644 --- a/python/idsse/testing/ims_service/proxy_web_service.py +++ b/python/idsse/testing/ims_service/proxy_web_service.py @@ -13,7 +13,7 @@ from datetime import datetime, UTC from argparse import ArgumentParser, Namespace -from flask import Flask, request, jsonify, current_app +from flask import Flask, current_app, request, jsonify from src.event_store import EventStore @@ -39,7 +39,7 @@ def handler(self): uptime = datetime.now(UTC) - self._app_start_time return jsonify({ 'startedAt': to_iso(self._app_start_time), - 'uptime': uptime.total_seconds + 'uptime': uptime.total_seconds() }), 200 @@ -77,10 +77,12 @@ def handler(self): new_events = self.event_store.get_all(filter_new_profiles=True) # update EventStore to label all queried events as no longer "new"; # they've now been returned to IDSS Engine clients at least once + current_app.logger.info('Got all new events: %s', new_events) for event in new_events: self.event_store.move_to_existing(event['id']) - return jsonify({'events': events}), 200 + return jsonify({'events': new_events}), 200 + # status query param should have been 'existing' or 'new' return jsonify({'message': f'Invalid event status: {event_status}'}), 400 @@ -105,16 +107,23 @@ def run(self, **kwargs): def create_app(args: Namespace = None) -> Flask: + """Create a Flask instance""" base_dir = args.base_dir _wrapper = AppWrapper(base_dir) return _wrapper.app if __name__ == '__main__': - # TODO: command line args + parser = ArgumentParser() + parser.add_argument('--port', dest='port', default=5000, type=int, + help='The port the web server will 
listen on.') + parser.add_argument('--base_dir', dest='base_dir', required=True, type=str, + help='The base directory where Support Profile JSONs will be read/written') + + _args = parser.parse_args() + app = create_app(_args) # host=0.0.0.0 is required for flask to work properly in docker and k8s env - app = create_app() - app.run(host='0.0.0.0', port=5000) + app.run(host='0.0.0.0', port=_args.port) # TODO: gunicorn runtime diff --git a/python/idsse/testing/ims_service/src/event_store.py b/python/idsse/testing/ims_service/src/event_store.py index 7502674..6efa97c 100644 --- a/python/idsse/testing/ims_service/src/event_store.py +++ b/python/idsse/testing/ims_service/src/event_store.py @@ -70,19 +70,20 @@ def move_to_existing(self, event_id: str) -> bool: Returns: bool: True on success. False if JSON with this event_id (UUID) not found on filesystem """ - new_filepath = os.path.join(self._new_dir, event_id, '.json') + new_filepath = os.path.join(self._new_dir, event_id + '.json') if not os.path.exists(new_filepath): + # TODO: why is this thrown when requesting /events?status=new ? 
logger.warning('Attempt to mark as "existing" profile that is not found: %s', new_filepath) return False # move the JSON file from the "new" to the "existing" directory and update cache - existing_filepath = os.path.join(self._existing_dir, event_id, '.json') + existing_filepath = os.path.join(self._existing_dir, event_id + '.json') os.rename(new_filepath, existing_filepath) # find the event data from the new_profiles cache and move it to the existing_profiles - event_data = next([profile for profile in self.new_profiles if profile['id'] == event_id], + event_data = next((profile for profile in self.new_profiles if profile['id'] == event_id), None) if not event_data: logger.warning('Support Profile %s expected in new_profiles cache but not found', @@ -133,14 +134,14 @@ def _save_event(self, event: dict, is_new: bool) -> str: # determine the right filepath where JSON data will be written dir_path = self._new_dir if is_new else self._existing_dir - filepath = os.path.join(dir_path, event_id, '.json') + filepath = os.path.join(dir_path, event_id + '.json') # will be saved to the appropriate in-memory list of profile data profile_cache = self.new_profiles if is_new else self.existing_profiles logger.info('Now saving event to path: %s', filepath) with open(filepath, 'w', encoding='utf-8') as file: - os.write(file, event) # save JSON to filesystem + json.dump(event, file) # save JSON to filesystem profile_cache.append(event) return event_id @@ -159,7 +160,7 @@ def _read_existing_event(self, event_id: str) -> dict: """Get an existing event from disk based on the event ID. Reads from NEW subdirectory should almost never be necessary. 
""" - filename = os.path.join(self._existing_dir, event_id, '.json') + filename = os.path.join(self._existing_dir, event_id + '.json') logger.debug('Attempting to read existing event from path: %s', filename) with open(filename, 'r', encoding='utf-8') as file: return json.load(file) @@ -167,7 +168,7 @@ def _read_existing_event(self, event_id: str) -> dict: def _write_new_event(self, data: dict): """Write a new Support Profile Event to disk using `self._new_dir`""" event_id = data['id'] - with open(os.path.join(self._new_dir, event_id, '.json'), 'w', encoding='utf-8') as file: + with open(os.path.join(self._new_dir, event_id + '.json'), 'w', encoding='utf-8') as file: os.write(file, data) def _delete_event(self, event_id: str) -> bool: @@ -175,10 +176,10 @@ def _delete_event(self, event_id: str) -> bool: Returns: bool: True on success, False if JSON file not found """ - filepath = os.path.join(self._existing_dir, event_id, '.json') + filepath = os.path.join(self._existing_dir, event_id + '.json') if not os.path.exists(filepath): # event does not in exist in existing subdirectory, maybe its in the new one - filepath = os.path.join(self._existing_dir, event_id, '.json') + filepath = os.path.join(self._existing_dir, event_id + '.json') if not os.path.exists(filepath): logger.warning('Cannot delete event %s; JSON file not found in %s or %s', From 424833f40eb9fb282c70f37761330cc944554771 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 09:35:47 -0700 Subject: [PATCH 03/24] rename EventStore to ProfileStore --- .../src/{event_store.py => profile_store.py} | 137 +++++++++--------- 1 file changed, 72 insertions(+), 65 deletions(-) rename python/idsse/testing/ims_service/src/{event_store.py => profile_store.py} (50%) diff --git a/python/idsse/testing/ims_service/src/event_store.py b/python/idsse/testing/ims_service/src/profile_store.py similarity index 50% rename from python/idsse/testing/ims_service/src/event_store.py rename to 
python/idsse/testing/ims_service/src/profile_store.py index 6efa97c..0ff90f9 100644 --- a/python/idsse/testing/ims_service/src/event_store.py +++ b/python/idsse/testing/ims_service/src/profile_store.py @@ -1,4 +1,4 @@ -"""Event store that does CRUD operations on filesystem to simulate NWS Connect storage""" +"""Profile store that does CRUD operations on filesystem to simulate NWS Connect storage""" # ---------------------------------------------------------------------------------- # Created on Tues Dec 17 2024 # @@ -13,14 +13,14 @@ import logging from glob import glob -# constants controlling the subdirectory where new vs. existing Events are saved +# constants controlling the subdirectory where new vs. existing Profiles are saved NEW_SUBDIR = 'new' EXISTING_SUBDIR = 'existing' logger = logging.getLogger(__name__) -class EventStore: +class ProfileStore: """Data storage using JSON files on filesystem that simulates CRUD operations""" def __init__(self, base_dir: str): self._base_dir = base_dir @@ -37,7 +37,7 @@ def __init__(self, base_dir: str): self.existing_profiles = [] logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) - self.existing_profiles = self._read_existing_events() + self.existing_profiles = self._read_existing_profiles() def get_all(self, filter_new_profiles = False) -> list[dict]: """Get all Support Profile JSONs persisted in this API, filtering by status='new' @@ -53,139 +53,146 @@ def get_all(self, filter_new_profiles = False) -> list[dict]: return self.new_profiles return self.existing_profiles - def save(self, event: dict) -> str | None: - """Persist a new Support Profile Event to this API + def save(self, profile: dict) -> str | None: + """Persist a new Support Profile Profile to this API Returns: str | None: UUID of saved Support Profile on success, otherwise None """ - logger.debug('Now saving new profile: %s', event) + logger.debug('Now saving new profile: %s', profile) # save to JSON file and add to in-memory 
cache - return self._save_event(event, is_new=True) + return self._save_profile(profile, is_new=True) - def move_to_existing(self, event_id: str) -> bool: - """Mark an existing Support Profile Event as being "read", a.k.a. has been returned - in API response at least once and is no longer "new". + def move_to_existing(self, profile_id: str) -> bool: + """Mark a formerly "new" Support Profile as "existing", a.k.a. has been returned in + API response at least once and should no longer be processed as "new" Returns: - bool: True on success. False if JSON with this event_id (UUID) not found on filesystem + bool: True on success. False if JSON with this profile_id not found on filesystem """ - new_filepath = os.path.join(self._new_dir, event_id + '.json') + new_filepath = os.path.join(self._new_dir, profile_id + '.json') if not os.path.exists(new_filepath): - # TODO: why is this thrown when requesting /events?status=new ? + # TODO: why is this thrown when requesting /profiles?status=new ? logger.warning('Attempt to mark as "existing" profile that is not found: %s', new_filepath) return False # move the JSON file from the "new" to the "existing" directory and update cache - existing_filepath = os.path.join(self._existing_dir, event_id + '.json') + existing_filepath = os.path.join(self._existing_dir, profile_id + '.json') os.rename(new_filepath, existing_filepath) - # find the event data from the new_profiles cache and move it to the existing_profiles - event_data = next((profile for profile in self.new_profiles if profile['id'] == event_id), - None) - if not event_data: + # find the profile data from the new_profiles cache and move it to existing_profiles + profile_data = next((profile for profile in self.new_profiles + if profile['id'] == profile_id), None) + if not profile_data: logger.warning('Support Profile %s expected in new_profiles cache but not found', - event_id) + profile_id) # unexpectedly, profile is not in new_profiles cache; # recover from this by 
re-reading the JSON from file, because file did exist - event_data = self._read_existing_event(event_id) + profile_data = self._read_existing_profile(profile_id) - # add Event/Profile to existing_profiles cache, and scrub from new_profiles cache - self.existing_profiles.append(event_data) + # add Profile/Profile to existing_profiles cache, and scrub from new_profiles cache + self.existing_profiles.append(profile_data) self.new_profiles = [profile for profile in self.existing_profiles - if profile['id'] != event_id] + if profile['id'] != profile_id] return True - def delete(self, event_id: str) -> bool: - """Delete a Support Profile event from storage, based on its UUID. + def delete(self, profile_id: str) -> bool: + """Delete a Support Profile profile from storage, based on its UUID. Returns: bool: True on success """ - logger.debug('Attempting to delete event_id %s', event_id) - is_deleted = self._delete_event(event_id) + logger.debug('Attempting to delete profile_id %s', profile_id) + is_deleted = self._delete_profile(profile_id) if not is_deleted: return False # drop profile from new or existing cache (could be in either) - self.new_profiles = [profile for profile in self.new_profiles if profile['id'] != event_id] + self.new_profiles = [profile for profile in self.new_profiles + if profile['id'] != profile_id] self.existing_profiles = [profile for profile in self.existing_profiles - if profile['id'] != event_id] + if profile['id'] != profile_id] return True # private methods that do the actual disk read/write operations - def _save_event(self, event: dict, is_new: bool) -> str: - """Writes event JSON to disk and adds the JSON to appropriate in-memory cache: + def _save_profile(self, profile: dict, is_new: bool) -> str: + """Writes profile JSON to disk and adds the JSON to appropriate in-memory cache: `self.new_profiles` if `is_new`, otherwise `self.existing_profiles`. 
Args: - event (dict): full JSON data to be saved - is_new (bool): if True, event will be saved in subdirectory and in-memory cache for + profile (dict): full JSON data to be saved + is_new (bool): if True, profile will be saved in subdirectory and in-memory cache for "new" profiles. Otherwise will be stored in "existing" profiles subdir/cache. Returns: - str: the event_id, if save was successful + str: the profile_id, if save was successful """ - event_id = event.get('id') + profile_id = profile.get('id') # determine the right filepath where JSON data will be written dir_path = self._new_dir if is_new else self._existing_dir - filepath = os.path.join(dir_path, event_id + '.json') + filepath = os.path.join(dir_path, profile_id + '.json') # will be saved to the appropriate in-memory list of profile data profile_cache = self.new_profiles if is_new else self.existing_profiles - logger.info('Now saving event to path: %s', filepath) + logger.info('Now saving profile to path: %s', filepath) with open(filepath, 'w', encoding='utf-8') as file: - json.dump(event, file) # save JSON to filesystem + json.dump(profile, file) # save JSON to filesystem - profile_cache.append(event) - return event_id + profile_cache.append(profile) + return profile_id - def _read_existing_events(self) -> list[dict]: - """Read all existing event JSON files from this EventStore's existing event subdirectory""" - event_list: list[dict] = [] + def _read_existing_profiles(self) -> list[dict]: + """Read all JSON files from this ProfileStore's `existing` subdirectory""" + profile_list: list[dict] = [] for filename in glob('*.json', root_dir=self._existing_dir): with open(os.path.join(self._existing_dir, filename), 'r', encoding='utf-8') as file: - json_data = json.load(file) - event_list.append(json_data) - - return event_list - - def _read_existing_event(self, event_id: str) -> dict: - """Get an existing event from disk based on the event ID. 
+ json_data: dict = json.load(file) + # if this is a pure NWS Connect response, profile will be nested inside `profiles` + if profiles := json_data.get('profiles', None) and isinstance(profiles, list): + for profile in profiles: + profile_list.append(profile) + else: + # this file is assumed to be just a Support Profile + profile_list.append(json_data) + + return profile_list + + def _read_existing_profile(self, profile_id: str) -> dict: + """Get an existing profile from disk based on the profile ID. Reads from NEW subdirectory should almost never be necessary. """ - filename = os.path.join(self._existing_dir, event_id + '.json') - logger.debug('Attempting to read existing event from path: %s', filename) + filename = os.path.join(self._existing_dir, profile_id + '.json') + logger.debug('Attempting to read existing profile from path: %s', filename) with open(filename, 'r', encoding='utf-8') as file: return json.load(file) - def _write_new_event(self, data: dict): - """Write a new Support Profile Event to disk using `self._new_dir`""" - event_id = data['id'] - with open(os.path.join(self._new_dir, event_id + '.json'), 'w', encoding='utf-8') as file: + def _write_new_profile(self, data: dict): + """Write a new Support Profile Profile to disk using `self._new_dir`""" + profile_id = data['id'] + with open(os.path.join(self._new_dir, profile_id + '.json'), 'w', encoding='utf-8') as file: os.write(file, data) - def _delete_event(self, event_id: str) -> bool: - """Delete event from disk by event_id. + def _delete_profile(self, profile_id: str) -> bool: + """Delete profile from disk by profile_id. 
Returns: bool: True on success, False if JSON file not found """ - filepath = os.path.join(self._existing_dir, event_id + '.json') + filepath = os.path.join(self._existing_dir, profile_id + '.json') if not os.path.exists(filepath): - # event does not in exist in existing subdirectory, maybe its in the new one - filepath = os.path.join(self._existing_dir, event_id + '.json') + # profile does not in exist in existing subdirectory, maybe its in the new one + filepath = os.path.join(self._existing_dir, profile_id + '.json') if not os.path.exists(filepath): - logger.warning('Cannot delete event %s; JSON file not found in %s or %s', - event_id, self._existing_dir, self._new_dir) + logger.warning('Cannot delete profile %s; JSON file not found in %s or %s', + profile_id, self._existing_dir, self._new_dir) return False - logger.debug('Attempting to delete event at path: %s', filepath) + logger.debug('Attempting to delete profile at path: %s', filepath) os.remove(filepath) return True From fad680e2179748c195ad993fd6e7835f0e547cb2 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 09:46:54 -0700 Subject: [PATCH 04/24] delete unnecessary private methods --- .../testing/ims_service/src/profile_store.py | 114 ++++++------------ 1 file changed, 36 insertions(+), 78 deletions(-) diff --git a/python/idsse/testing/ims_service/src/profile_store.py b/python/idsse/testing/ims_service/src/profile_store.py index 0ff90f9..1eddd43 100644 --- a/python/idsse/testing/ims_service/src/profile_store.py +++ b/python/idsse/testing/ims_service/src/profile_store.py @@ -33,10 +33,7 @@ def __init__(self, base_dir: str): os.mkdir(_dir) # cache of JSON data of all Support Profiles, divided into new vs. 
existing Profiles - self.new_profiles = [] - self.existing_profiles = [] - - logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) + self.new_profiles: list[dict] = [] self.existing_profiles = self._read_existing_profiles() def get_all(self, filter_new_profiles = False) -> list[dict]: @@ -60,8 +57,17 @@ def save(self, profile: dict) -> str | None: str | None: UUID of saved Support Profile on success, otherwise None """ logger.debug('Now saving new profile: %s', profile) - # save to JSON file and add to in-memory cache - return self._save_profile(profile, is_new=True) + profile_id = profile.get('id') + + # determine the right filepath where JSON data will be written + filepath = os.path.join(self._new_dir , profile_id + '.json') + + logger.info('Now saving profile to path: %s', filepath) + with open(filepath, 'w', encoding='utf-8') as file: + json.dump(profile, file) # save JSON to filesystem + + self.new_profiles.append(profile) # add profile to in-memory cache + return profile_id def move_to_existing(self, profile_id: str) -> bool: """Mark a formerly "new" Support Profile as "existing", a.k.a. 
has been returned in @@ -90,10 +96,13 @@ def move_to_existing(self, profile_id: str) -> bool: profile_id) # unexpectedly, profile is not in new_profiles cache; - # recover from this by re-reading the JSON from file, because file did exist - profile_data = self._read_existing_profile(profile_id) + # recover from this by re-reading the JSON from file, because file must exist + filename = os.path.join(self._existing_dir, profile_id + '.json') + logger.debug('Attempting to read existing profile from path: %s', filename) + with open(filename, 'r', encoding='utf-8') as file: + profile_data = json.load(file) - # add Profile/Profile to existing_profiles cache, and scrub from new_profiles cache + # add Profile to existing_profiles cache, and scrub from new_profiles cache self.existing_profiles.append(profile_data) self.new_profiles = [profile for profile in self.existing_profiles if profile['id'] != profile_id] @@ -106,49 +115,32 @@ def delete(self, profile_id: str) -> bool: Returns: bool: True on success """ - logger.debug('Attempting to delete profile_id %s', profile_id) - is_deleted = self._delete_profile(profile_id) - if not is_deleted: - return False - - # drop profile from new or existing cache (could be in either) - self.new_profiles = [profile for profile in self.new_profiles - if profile['id'] != profile_id] - self.existing_profiles = [profile for profile in self.existing_profiles - if profile['id'] != profile_id] - - return True - - # private methods that do the actual disk read/write operations - def _save_profile(self, profile: dict, is_new: bool) -> str: - """Writes profile JSON to disk and adds the JSON to appropriate in-memory cache: - `self.new_profiles` if `is_new`, otherwise `self.existing_profiles`. 
+ logger.info('Deleting profile_id %s', profile_id) + filepath = os.path.join(self._existing_dir, profile_id + '.json') + profile_cache = self.existing_profiles # assume profile is in "existing" cache - Args: - profile (dict): full JSON data to be saved - is_new (bool): if True, profile will be saved in subdirectory and in-memory cache for - "new" profiles. Otherwise will be stored in "existing" profiles subdir/cache. - Returns: - str: the profile_id, if save was successful - """ - profile_id = profile.get('id') + if not os.path.exists(filepath): + # profile does not in exist in "existing" subdirectory, maybe its in "new" + filepath = os.path.join(self._existing_dir, profile_id + '.json') + profile_cache = self.new_profiles # now assume profile is in "new" cache - # determine the right filepath where JSON data will be written - dir_path = self._new_dir if is_new else self._existing_dir - filepath = os.path.join(dir_path, profile_id + '.json') + if not os.path.exists(filepath): + logger.warning('Cannot delete profile %s; JSON file not found in %s or %s', + profile_id, self._existing_dir, self._new_dir) + return False - # will be saved to the appropriate in-memory list of profile data - profile_cache = self.new_profiles if is_new else self.existing_profiles + logger.debug('Attempting to delete profile at path: %s', filepath) + os.remove(filepath) - logger.info('Now saving profile to path: %s', filepath) - with open(filepath, 'w', encoding='utf-8') as file: - json.dump(profile, file) # save JSON to filesystem + # drop profile from new/existing cache (whichever subdirectory it was found in) + profile_cache = [profile for profile in profile_cache if profile['id'] != profile_id] - profile_cache.append(profile) - return profile_id + return True def _read_existing_profiles(self) -> list[dict]: """Read all JSON files from this ProfileStore's `existing` subdirectory""" + logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) + profile_list: 
list[dict] = [] for filename in glob('*.json', root_dir=self._existing_dir): with open(os.path.join(self._existing_dir, filename), 'r', encoding='utf-8') as file: @@ -162,37 +154,3 @@ def _read_existing_profiles(self) -> list[dict]: profile_list.append(json_data) return profile_list - - def _read_existing_profile(self, profile_id: str) -> dict: - """Get an existing profile from disk based on the profile ID. - Reads from NEW subdirectory should almost never be necessary. - """ - filename = os.path.join(self._existing_dir, profile_id + '.json') - logger.debug('Attempting to read existing profile from path: %s', filename) - with open(filename, 'r', encoding='utf-8') as file: - return json.load(file) - - def _write_new_profile(self, data: dict): - """Write a new Support Profile Profile to disk using `self._new_dir`""" - profile_id = data['id'] - with open(os.path.join(self._new_dir, profile_id + '.json'), 'w', encoding='utf-8') as file: - os.write(file, data) - - def _delete_profile(self, profile_id: str) -> bool: - """Delete profile from disk by profile_id. 
- Returns: - bool: True on success, False if JSON file not found - """ - filepath = os.path.join(self._existing_dir, profile_id + '.json') - if not os.path.exists(filepath): - # profile does not in exist in existing subdirectory, maybe its in the new one - filepath = os.path.join(self._existing_dir, profile_id + '.json') - - if not os.path.exists(filepath): - logger.warning('Cannot delete profile %s; JSON file not found in %s or %s', - profile_id, self._existing_dir, self._new_dir) - return False - - logger.debug('Attempting to delete profile at path: %s', filepath) - os.remove(filepath) - return True From a644bfb4568d5e51a6fccac4bafdfbb19eeed892 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 11:50:23 -0700 Subject: [PATCH 05/24] move ims_service dir to nwsc_dummy_service --- .../testing/ims_service/src/ims_service.py | 77 ------------------- .../ims_service/test/test_event_store.py | 2 - .../README.md | 2 +- .../__init__.py | 0 .../nwsc_dummy_web_service.py} | 69 +++++++++-------- .../profiles/nwsc_test_response_1.json} | 0 .../profiles/nwsc_test_response_2.json} | 0 .../profiles/nwsc_test_response_3.json} | 0 .../src/__init__.py | 0 .../src/profile_store.py | 67 ++++++++-------- .../test/__init__.py | 0 .../test/test_profile_store.py | 2 + 12 files changed, 76 insertions(+), 143 deletions(-) delete mode 100644 python/idsse/testing/ims_service/src/ims_service.py delete mode 100644 python/idsse/testing/ims_service/test/test_event_store.py rename python/{idsse/testing/ims_service => nwsc_dummy_service}/README.md (98%) rename python/{idsse/testing/ims_service => nwsc_dummy_service}/__init__.py (100%) rename python/{idsse/testing/ims_service/proxy_web_service.py => nwsc_dummy_service/nwsc_dummy_web_service.py} (63%) rename python/{idsse/testing/ims_service/profiles/ims_test_1.json => nwsc_dummy_service/profiles/nwsc_test_response_1.json} (100%) rename python/{idsse/testing/ims_service/profiles/ims_test_2.json => 
nwsc_dummy_service/profiles/nwsc_test_response_2.json} (100%) rename python/{idsse/testing/ims_service/profiles/ims_test_3.json => nwsc_dummy_service/profiles/nwsc_test_response_3.json} (100%) rename python/{idsse/testing/ims_service => nwsc_dummy_service}/src/__init__.py (100%) rename python/{idsse/testing/ims_service => nwsc_dummy_service}/src/profile_store.py (74%) rename python/{idsse/testing/ims_service => nwsc_dummy_service}/test/__init__.py (100%) create mode 100644 python/nwsc_dummy_service/test/test_profile_store.py diff --git a/python/idsse/testing/ims_service/src/ims_service.py b/python/idsse/testing/ims_service/src/ims_service.py deleted file mode 100644 index 217860f..0000000 --- a/python/idsse/testing/ims_service/src/ims_service.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Test service for ims_gateway services""" -# ---------------------------------------------------------------------------------- -# Created on Fri Apr 07 2023 -# -# Copyright (c) 2023 Colorado State University. All rights reserved. (1) -# -# Contributors: -# Paul Hamer (1) -# -# ---------------------------------------------------------------------------------- -# pylint: disable=missing-function-docstring,redefined-outer-name,protected-access -# pylint: disable=unused-argument, disable=duplicate-code -import json - -from glob import glob -from os import path - -from flask import Flask, request, jsonify - -app = Flask(__name__) -app.config['GSL_KEY'] = '8209c979-e3de-402e-a1f5-556d650ab889' - -# The joined profiles from the JSON examples... -ims_request = {'errors': [], 'profiles': []} - - -@app.route('/all-events', methods=['GET']) -def profiles(): - # First check for the key argument and that it matches the expected value... 
- if request.headers.get("X-Api-Key") != app.config['GSL_KEY']: - return jsonify({"message": "ERROR: Unauthorized"}), 401 - - if len(request.args.keys()) != 1 or request.args.get('dataSource') != 'NBM': - # add one more check for ANY (currently IMS Gateway Request is using 'ANY') - if request.args.get('dataSource') != 'ANY': - return jsonify({"message": "Bad Request : Invalid argument!"}), 400 - - # Return the profiles... - return jsonify(ims_request) - - -@app.route('/ims-response', methods=['POST']) -def response(): - # First check for the key argument and that it matches the expected value... - if request.headers.get("X-Api-Key") != app.config['GSL_KEY']: - return jsonify({"message": "ERROR: Unauthorized"}), 401 - - data = request.get_json() # Assumes the incoming data is in JSON format - print("Received POST request with data:", data) - - # Process the data or perform any desired actions - return jsonify({"message": "POST request received successfully!"}) - - -if __name__ == '__main__': - # Load the canned profiles from the resources directory into a single dictionary to form - # one JSON response when queried by the IMS_request service. 
- profile_dir = path.join(path.dirname(__file__), '..', 'profiles') - json_files = [ - path.join(profile_dir, file) - for file in glob('*.json', root_dir=profile_dir) - ] - - print('Loading canned support profiles from:', json_files) - # json_files = sorted(glob('../profiles/*.json')) - for json_file in json_files: - with open(json_file, 'r', encoding="utf-8") as jf: - profile = json.load(jf) - # print(profile) - for err in profile['errors']: - ims_request['errors'].append(err) - for pro in profile['profiles']: - ims_request['profiles'].append(pro) - # ims_request = ims_request | {os.path.basename(json_file).strip('.json') : profile} - - # host=0.0.0.0 is required for flask to work properly in docker and k8s env - app.run(host='0.0.0.0', port=5000) diff --git a/python/idsse/testing/ims_service/test/test_event_store.py b/python/idsse/testing/ims_service/test/test_event_store.py deleted file mode 100644 index 6c8fe9b..0000000 --- a/python/idsse/testing/ims_service/test/test_event_store.py +++ /dev/null @@ -1,2 +0,0 @@ -"""Tests for src/event_store.py""" -pass diff --git a/python/idsse/testing/ims_service/README.md b/python/nwsc_dummy_service/README.md similarity index 98% rename from python/idsse/testing/ims_service/README.md rename to python/nwsc_dummy_service/README.md index ed97268..9c48d49 100644 --- a/python/idsse/testing/ims_service/README.md +++ b/python/nwsc_dummy_service/README.md @@ -73,5 +73,5 @@ The most common way to get python dependencies installed is to use either [conda Lastly, `cd` to the `./python` directory, and start the relevant service. 
For example, for Request Service: ```sh -python3 proxy_service.py +python3 nwsc_dummy_web_service.py ``` diff --git a/python/idsse/testing/ims_service/__init__.py b/python/nwsc_dummy_service/__init__.py similarity index 100% rename from python/idsse/testing/ims_service/__init__.py rename to python/nwsc_dummy_service/__init__.py diff --git a/python/idsse/testing/ims_service/proxy_web_service.py b/python/nwsc_dummy_service/nwsc_dummy_web_service.py similarity index 63% rename from python/idsse/testing/ims_service/proxy_web_service.py rename to python/nwsc_dummy_service/nwsc_dummy_web_service.py index fe15067..22f8fc7 100644 --- a/python/idsse/testing/ims_service/proxy_web_service.py +++ b/python/nwsc_dummy_service/nwsc_dummy_web_service.py @@ -1,4 +1,4 @@ -"""Proxy web service simulating behaviors of NWS Connect core services""" +"""Dummy web service simulating behaviors of NWS Connect core services""" # ---------------------------------------------------------------------------------- # Created on Fri Apr 07 2023 # @@ -9,13 +9,13 @@ # Mackenzie Grimes (1) # # ---------------------------------------------------------------------------------- -# pylint: disable=too-few-public-methods +import os from datetime import datetime, UTC from argparse import ArgumentParser, Namespace from flask import Flask, current_app, request, jsonify -from src.event_store import EventStore +from src.profile_store import ProfileStore # constants GSL_KEY = '8209c979-e3de-402e-a1f5-556d650ab889' @@ -29,6 +29,7 @@ def to_iso(date_time: datetime) -> str: else date_time.strftime("%Z")[3:]) +# pylint: disable=too-few-public-methods class HealthRoute: """Handle requests to /health endpoint""" def __init__(self): @@ -44,12 +45,12 @@ def handler(self): class EventsRoute: - """Handle requests to /events endpoint""" + """Handle requests to /all-events endpoint""" def __init__(self, base_dir: str): - self.event_store = EventStore(base_dir) + self.profile_store = ProfileStore(base_dir) def 
handler(self): - """Logic for requests to /events""" + """Logic for requests to /all-events""" # check that this request has proper key to get or add data if request.headers.get('X-Api-Key') != GSL_KEY: return jsonify({'message': 'ERROR: Unauthorized'}), 401 @@ -57,33 +58,37 @@ def handler(self): if request.method == 'POST': # request is saving new Support Profile event request_body: dict = request.json - event_id = self.event_store.save(request_body) # TODO: handle failure? - - return jsonify({'message': f'Event {event_id} saved'}), 201 + profile_id = self.profile_store.save(request_body) # TODO: handle failure? + return jsonify({'message': f'Profile {profile_id} saved'}), 201 if request.method == 'DELETE': - event_id = request.args.get('uuid', default=None, type=str) - self.event_store.delete(event_id) # TODO: handle failure? - - return jsonify({'message': f'Event {event_id} deleted'}), 204 + profile_id = request.args.get('uuid', default=None, type=str) + self.profile_store.delete(profile_id) # TODO: handle failure? 
+ return jsonify({'message': f'Profile {profile_id} deleted'}), 204 # otherwise, must be 'GET' operation - event_status = request.args.get('status', default='existing', type=str) - if event_status == 'existing': - events = self.event_store.get_all() - return jsonify({'events': events}), 200 - - if event_status == 'new': - new_events = self.event_store.get_all(filter_new_profiles=True) - # update EventStore to label all queried events as no longer "new"; + data_source = request.args.get('dataSource', None, type='str') + if data_source != 'NBM': + return jsonify({'profiles': [], 'errors': [f'Invalid dataSource: {data_source}']}), 400 + + profile_status = request.args.get('status', default='existing', type=str) + if profile_status == 'existing': + profiles = self.profile_store.get_all() + + if profile_status == 'new': + profiles = self.profile_store.get_all(filter_new_profiles=True) + # update ProfileStore to label all queried events as no longer "new"; # they've now been returned to IDSS Engine clients at least once - current_app.logger.info('Got all new events: %s', new_events) - for event in new_events: - self.event_store.move_to_existing(event['id']) - return jsonify({'events': new_events}), 200 + current_app.logger.info('Got all new profiles: %s', profiles) + for profile in profiles: + self.profile_store.move_to_existing(profile['id']) + + else: + # status query param should have been 'existing' or 'new' + return jsonify({'message': f'Invalid profile status: {profile_status}'}), 400 + + return jsonify({'profiles': profiles, 'errors': []}), 200 - # status query param should have been 'existing' or 'new' - return jsonify({'message': f'Invalid event status: {event_status}'}), 400 class AppWrapper: @@ -97,7 +102,7 @@ def __init__(self, base_dir: str): self.app.add_url_rule('/health', 'health', view_func=health_route.handler, methods=['GET']) - self.app.add_url_rule('/events', 'events', + self.app.add_url_rule('/all-events', 'events', view_func=events_route.handler, 
methods=['GET', 'POST', 'DELETE']) @@ -109,8 +114,7 @@ def run(self, **kwargs): def create_app(args: Namespace = None) -> Flask: """Create a Flask instance""" base_dir = args.base_dir - _wrapper = AppWrapper(base_dir) - return _wrapper.app + return AppWrapper(base_dir).app if __name__ == '__main__': @@ -126,4 +130,7 @@ def create_app(args: Namespace = None) -> Flask: # host=0.0.0.0 is required for flask to work properly in docker and k8s env app.run(host='0.0.0.0', port=_args.port) -# TODO: gunicorn runtime +elif 'gunicorn' in os.getenv('SERVER_SOFTWARE', default=''): # pragma: no cover + # default to current directory + _base_dir = os.getenv('BASE_DIR', os.getcwd()) + app = AppWrapper(_base_dir).app diff --git a/python/idsse/testing/ims_service/profiles/ims_test_1.json b/python/nwsc_dummy_service/profiles/nwsc_test_response_1.json similarity index 100% rename from python/idsse/testing/ims_service/profiles/ims_test_1.json rename to python/nwsc_dummy_service/profiles/nwsc_test_response_1.json diff --git a/python/idsse/testing/ims_service/profiles/ims_test_2.json b/python/nwsc_dummy_service/profiles/nwsc_test_response_2.json similarity index 100% rename from python/idsse/testing/ims_service/profiles/ims_test_2.json rename to python/nwsc_dummy_service/profiles/nwsc_test_response_2.json diff --git a/python/idsse/testing/ims_service/profiles/ims_test_3.json b/python/nwsc_dummy_service/profiles/nwsc_test_response_3.json similarity index 100% rename from python/idsse/testing/ims_service/profiles/ims_test_3.json rename to python/nwsc_dummy_service/profiles/nwsc_test_response_3.json diff --git a/python/idsse/testing/ims_service/src/__init__.py b/python/nwsc_dummy_service/src/__init__.py similarity index 100% rename from python/idsse/testing/ims_service/src/__init__.py rename to python/nwsc_dummy_service/src/__init__.py diff --git a/python/idsse/testing/ims_service/src/profile_store.py b/python/nwsc_dummy_service/src/profile_store.py similarity index 74% rename from 
python/idsse/testing/ims_service/src/profile_store.py rename to python/nwsc_dummy_service/src/profile_store.py index 1eddd43..6ea29e5 100644 --- a/python/idsse/testing/ims_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -12,6 +12,7 @@ import json import logging from glob import glob +from typing import NamedTuple # constants controlling the subdirectory where new vs. existing Profiles are saved NEW_SUBDIR = 'new' @@ -20,6 +21,17 @@ logger = logging.getLogger(__name__) +class CachedProfile(NamedTuple): + """Data class to hold Support Profile's data and metadata (status) + + Args: + data (dict): full JSON data of this Support Profile + is_new (bool): track if Support Profile has ever been processed. Ought to start as True + """ + data: dict + is_new: bool + + class ProfileStore: """Data storage using JSON files on filesystem that simulates CRUD operations""" def __init__(self, base_dir: str): @@ -32,9 +44,9 @@ def __init__(self, base_dir: str): if not os.path.exists(_dir): os.mkdir(_dir) - # cache of JSON data of all Support Profiles, divided into new vs. existing Profiles - self.new_profiles: list[dict] = [] - self.existing_profiles = self._read_existing_profiles() + # cache of JSON data of all Support Profiles, marked as new vs. existing Profiles + self.profile_cache = [CachedProfile(profile, is_new=False) + for profile in self._read_existing_profiles()] def get_all(self, filter_new_profiles = False) -> list[dict]: """Get all Support Profile JSONs persisted in this API, filtering by status='new' @@ -46,9 +58,8 @@ def get_all(self, filter_new_profiles = False) -> list[dict]: returned to IDSS Engine on previous requests (never processed). Default is False (return all existing profiles). 
""" - if filter_new_profiles: - return self.new_profiles - return self.existing_profiles + return [cached_profile.data for cached_profile in self.profile_cache + if cached_profile.is_new == filter_new_profiles] def save(self, profile: dict) -> str | None: """Persist a new Support Profile Profile to this API @@ -60,13 +71,14 @@ def save(self, profile: dict) -> str | None: profile_id = profile.get('id') # determine the right filepath where JSON data will be written - filepath = os.path.join(self._new_dir , profile_id + '.json') + filepath = os.path.join(self._new_dir, profile_id + '.json') logger.info('Now saving profile to path: %s', filepath) with open(filepath, 'w', encoding='utf-8') as file: json.dump(profile, file) # save JSON to filesystem - self.new_profiles.append(profile) # add profile to in-memory cache + # add profile to in-memory cache + self.profile_cache.append(CachedProfile(profile, is_new=True)) return profile_id def move_to_existing(self, profile_id: str) -> bool: @@ -76,8 +88,16 @@ def move_to_existing(self, profile_id: str) -> bool: Returns: bool: True on success. False if JSON with this profile_id not found on filesystem """ - new_filepath = os.path.join(self._new_dir, profile_id + '.json') + # find the profile data from the new_profiles cache and move it to existing_profiles + cached_profile = next((profile for profile in self.profile_cache + if profile.data['id'] == profile_id), None) + if not cached_profile: + # profile is not in cache; it must not exist + logger.warning('Support Profile %s expected in profile_cache but not found', + profile_id) + return False + new_filepath = os.path.join(self._new_dir, profile_id + '.json') if not os.path.exists(new_filepath): # TODO: why is this thrown when requesting /profiles?status=new ? 
logger.warning('Attempt to mark as "existing" profile that is not found: %s', @@ -88,24 +108,9 @@ def move_to_existing(self, profile_id: str) -> bool: existing_filepath = os.path.join(self._existing_dir, profile_id + '.json') os.rename(new_filepath, existing_filepath) - # find the profile data from the new_profiles cache and move it to existing_profiles - profile_data = next((profile for profile in self.new_profiles - if profile['id'] == profile_id), None) - if not profile_data: - logger.warning('Support Profile %s expected in new_profiles cache but not found', - profile_id) - - # unexpectedly, profile is not in new_profiles cache; - # recover from this by re-reading the JSON from file, because file must exist - filename = os.path.join(self._existing_dir, profile_id + '.json') - logger.debug('Attempting to read existing profile from path: %s', filename) - with open(filename, 'r', encoding='utf-8') as file: - profile_data = json.load(file) - - # add Profile to existing_profiles cache, and scrub from new_profiles cache - self.existing_profiles.append(profile_data) - self.new_profiles = [profile for profile in self.existing_profiles - if profile['id'] != profile_id] + # find this profile in the cache and change is_new flag to false + cache_index = self.profile_cache.index(cached_profile) + self.profile_cache[cache_index].is_new = False return True @@ -117,12 +122,10 @@ def delete(self, profile_id: str) -> bool: """ logger.info('Deleting profile_id %s', profile_id) filepath = os.path.join(self._existing_dir, profile_id + '.json') - profile_cache = self.existing_profiles # assume profile is in "existing" cache if not os.path.exists(filepath): # profile does not in exist in "existing" subdirectory, maybe its in "new" filepath = os.path.join(self._existing_dir, profile_id + '.json') - profile_cache = self.new_profiles # now assume profile is in "new" cache if not os.path.exists(filepath): logger.warning('Cannot delete profile %s; JSON file not found in %s or %s', @@ 
-132,9 +135,9 @@ def delete(self, profile_id: str) -> bool: logger.debug('Attempting to delete profile at path: %s', filepath) os.remove(filepath) - # drop profile from new/existing cache (whichever subdirectory it was found in) - profile_cache = [profile for profile in profile_cache if profile['id'] != profile_id] - + # drop profile from cache + self.profile_cache = [cached_profile for cached_profile in self.profile_cache + if cached_profile.data['id'] != profile_id] return True def _read_existing_profiles(self) -> list[dict]: diff --git a/python/idsse/testing/ims_service/test/__init__.py b/python/nwsc_dummy_service/test/__init__.py similarity index 100% rename from python/idsse/testing/ims_service/test/__init__.py rename to python/nwsc_dummy_service/test/__init__.py diff --git a/python/nwsc_dummy_service/test/test_profile_store.py b/python/nwsc_dummy_service/test/test_profile_store.py new file mode 100644 index 0000000..9edd23a --- /dev/null +++ b/python/nwsc_dummy_service/test/test_profile_store.py @@ -0,0 +1,2 @@ +"""Tests for src/profile_store.py""" +pass From f39cd5e31a51ee449b22322baeef233947411904 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 12:36:37 -0700 Subject: [PATCH 06/24] load IMS responses as individual support profiles on startup, NamedTuple -> dataclass --- docker/mockims/dev/Dockerfile | 10 ++-- .../nwsc_dummy_web_service.py | 4 +- .../nwsc_dummy_service/src/profile_store.py | 50 ++++++++++++++----- .../profiles/nwsc_test_response_1.json | 0 .../profiles/nwsc_test_response_2.json | 0 .../profiles/nwsc_test_response_3.json | 0 6 files changed, 44 insertions(+), 20 deletions(-) rename python/nwsc_dummy_service/{ => src}/profiles/nwsc_test_response_1.json (100%) rename python/nwsc_dummy_service/{ => src}/profiles/nwsc_test_response_2.json (100%) rename python/nwsc_dummy_service/{ => src}/profiles/nwsc_test_response_3.json (100%) diff --git a/docker/mockims/dev/Dockerfile b/docker/mockims/dev/Dockerfile 
index 396001c..3113dc6 100644 --- a/docker/mockims/dev/Dockerfile +++ b/docker/mockims/dev/Dockerfile @@ -8,17 +8,17 @@ LABEL maintainer ${maintainer} #RUN conda config --add channels conda-forge && \ # conda install -y flask=2.3.2 -WORKDIR /python/ims_service +WORKDIR /python/nwsc_dummy_service # Copy source files -COPY ./python/idsse/testing/ims_service/src/ims_service.py /python/ims_service/ +COPY ./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ -# (TEMPORARY) Copy canned criteria files. To be removed when integration with IMS API exists -COPY ./python/idsse/testing/ims_service/profiles/*.json /python/profiles/ +# (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists +COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ # The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria # then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment # for now. 
#VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/ims_service/ims_service.py" ] +ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/nwsc_dummy_web_service.py", "--base_dir", "/python/profiles"] diff --git a/python/nwsc_dummy_service/nwsc_dummy_web_service.py b/python/nwsc_dummy_service/nwsc_dummy_web_service.py index 22f8fc7..e468351 100644 --- a/python/nwsc_dummy_service/nwsc_dummy_web_service.py +++ b/python/nwsc_dummy_service/nwsc_dummy_web_service.py @@ -67,7 +67,7 @@ def handler(self): return jsonify({'message': f'Profile {profile_id} deleted'}), 204 # otherwise, must be 'GET' operation - data_source = request.args.get('dataSource', None, type='str') + data_source = request.args.get('dataSource', None, type=str) if data_source != 'NBM': return jsonify({'profiles': [], 'errors': [f'Invalid dataSource: {data_source}']}), 400 @@ -75,7 +75,7 @@ def handler(self): if profile_status == 'existing': profiles = self.profile_store.get_all() - if profile_status == 'new': + elif profile_status == 'new': profiles = self.profile_store.get_all(filter_new_profiles=True) # update ProfileStore to label all queried events as no longer "new"; # they've now been returned to IDSS Engine clients at least once diff --git a/python/nwsc_dummy_service/src/profile_store.py b/python/nwsc_dummy_service/src/profile_store.py index 6ea29e5..ec836e5 100644 --- a/python/nwsc_dummy_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -11,8 +11,8 @@ import os import json import logging +from dataclasses import dataclass #, field from glob import glob -from typing import NamedTuple # constants controlling the subdirectory where new vs. 
existing Profiles are saved NEW_SUBDIR = 'new' @@ -21,8 +21,10 @@ logger = logging.getLogger(__name__) -class CachedProfile(NamedTuple): - """Data class to hold Support Profile's data and metadata (status) + +@dataclass +class CachedProfile: + """Data class to hold Support Profile's data and metadata ("new" vs "existing" status) Args: data (dict): full JSON data of this Support Profile @@ -31,6 +33,11 @@ class CachedProfile(NamedTuple): data: dict is_new: bool + @property + def id(self) -> str: + """The Support Profile UUID""" + return self.data.get('id') + class ProfileStore: """Data storage using JSON files on filesystem that simulates CRUD operations""" @@ -44,9 +51,26 @@ def __init__(self, base_dir: str): if not os.path.exists(_dir): os.mkdir(_dir) + # load any NWS Connect response files dumped into the base_dir + for response_filename in glob('*.json', root_dir=self._base_dir): + response_filepath = os.path.join(self._base_dir, response_filename) + logger.warning('Loading profiles from raw API response file: %s', response_filepath) + + with open(response_filepath, 'r', encoding='utf-8') as infile: + data: dict = json.load(infile) + + # loop through all profiles in this file, + # save them to "existing" directory as individual profiles + for profile in data.get('profiles', []): + profile_filepath = os.path.join(self._existing_dir, f'{profile["id"]}.json') + logger.info('Saving existing profile to file: %s', profile_filepath) + + with open(profile_filepath, 'w', encoding='utf-8') as outfile: + json.dump(profile, outfile) + # cache of JSON data of all Support Profiles, marked as new vs. 
existing Profiles self.profile_cache = [CachedProfile(profile, is_new=False) - for profile in self._read_existing_profiles()] + for profile in self._load_existing_profiles()] def get_all(self, filter_new_profiles = False) -> list[dict]: """Get all Support Profile JSONs persisted in this API, filtering by status='new' @@ -71,7 +95,7 @@ def save(self, profile: dict) -> str | None: profile_id = profile.get('id') # determine the right filepath where JSON data will be written - filepath = os.path.join(self._new_dir, profile_id + '.json') + filepath = os.path.join(self._new_dir, f'{profile_id}.json') logger.info('Now saving profile to path: %s', filepath) with open(filepath, 'w', encoding='utf-8') as file: @@ -97,7 +121,7 @@ def move_to_existing(self, profile_id: str) -> bool: profile_id) return False - new_filepath = os.path.join(self._new_dir, profile_id + '.json') + new_filepath = os.path.join(self._new_dir, f'{profile_id}.json') if not os.path.exists(new_filepath): # TODO: why is this thrown when requesting /profiles?status=new ? 
logger.warning('Attempt to mark as "existing" profile that is not found: %s', @@ -105,12 +129,12 @@ return False # move the JSON file from the "new" to the "existing" directory and update cache - existing_filepath = os.path.join(self._existing_dir, profile_id + '.json') + existing_filepath = os.path.join(self._existing_dir, f'{profile_id}.json') os.rename(new_filepath, existing_filepath) - # find this profile in the cache and change is_new flag to false - cache_index = self.profile_cache.index(cached_profile) - self.profile_cache[cache_index].is_new = False + # update this profile's is_new flag in in-memory cache + profile_index = self.profile_cache.index(cached_profile) + self.profile_cache[profile_index].is_new = False return True @@ -121,11 +145,11 @@ def delete(self, profile_id: str) -> bool: bool: True on success """ logger.info('Deleting profile_id %s', profile_id) - filepath = os.path.join(self._existing_dir, profile_id + '.json') + filepath = os.path.join(self._existing_dir, f'{profile_id}.json') if not os.path.exists(filepath): # profile does not exist in "existing" subdirectory, maybe it's in "new" - filepath = os.path.join(self._existing_dir, profile_id + '.json') + filepath = os.path.join(self._new_dir, f'{profile_id}.json') if not os.path.exists(filepath): logger.warning('Cannot delete profile %s; JSON file not found in %s or %s', @@ -140,7 +164,7 @@ if cached_profile.data['id'] != profile_id] return True - def _read_existing_profiles(self) -> list[dict]: + def _load_existing_profiles(self) -> list[dict]: """Read all JSON files from this ProfileStore's `existing` subdirectory""" logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) diff --git a/python/nwsc_dummy_service/profiles/nwsc_test_response_1.json b/python/nwsc_dummy_service/src/profiles/nwsc_test_response_1.json similarity index 100% rename from 
python/nwsc_dummy_service/profiles/nwsc_test_response_1.json rename to python/nwsc_dummy_service/src/profiles/nwsc_test_response_1.json diff --git a/python/nwsc_dummy_service/profiles/nwsc_test_response_2.json b/python/nwsc_dummy_service/src/profiles/nwsc_test_response_2.json similarity index 100% rename from python/nwsc_dummy_service/profiles/nwsc_test_response_2.json rename to python/nwsc_dummy_service/src/profiles/nwsc_test_response_2.json diff --git a/python/nwsc_dummy_service/profiles/nwsc_test_response_3.json b/python/nwsc_dummy_service/src/profiles/nwsc_test_response_3.json similarity index 100% rename from python/nwsc_dummy_service/profiles/nwsc_test_response_3.json rename to python/nwsc_dummy_service/src/profiles/nwsc_test_response_3.json From c72c42ad35f58fbf958872e672cb9845f65d00e4 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 12:42:47 -0700 Subject: [PATCH 07/24] add .id attribute to CachedProfile --- .../nwsc_dummy_service/src/profile_store.py | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/python/nwsc_dummy_service/src/profile_store.py b/python/nwsc_dummy_service/src/profile_store.py index ec836e5..a389b5b 100644 --- a/python/nwsc_dummy_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -11,7 +11,7 @@ import os import json import logging -from dataclasses import dataclass #, field +from dataclasses import dataclass from glob import glob # constants controlling the subdirectory where new vs. 
existing Profiles are saved @@ -92,18 +92,17 @@ def save(self, profile: dict) -> str | None: str | None: UUID of saved Support Profile on success, otherwise None """ logger.debug('Now saving new profile: %s', profile) - profile_id = profile.get('id') - - # determine the right filepath where JSON data will be written - filepath = os.path.join(self._new_dir, f'{profile_id}.json') + cached_profile = CachedProfile(profile, is_new=True) + # save Profile JSON to filesystem + filepath = os.path.join(self._new_dir, f'{cached_profile.id}.json') logger.info('Now saving profile to path: %s', filepath) with open(filepath, 'w', encoding='utf-8') as file: - json.dump(profile, file) # save JSON to filesystem + json.dump(profile, file) # add profile to in-memory cache - self.profile_cache.append(CachedProfile(profile, is_new=True)) - return profile_id + self.profile_cache.append(cached_profile) + return cached_profile.id def move_to_existing(self, profile_id: str) -> bool: """Mark a formerly "new" Support Profile as "existing", a.k.a. has been returned in @@ -114,7 +113,7 @@ def move_to_existing(self, profile_id: str) -> bool: """ # find the profile data from the new_profiles cache and move it to existing_profiles cached_profile = next((profile for profile in self.profile_cache - if profile.data['id'] == profile_id), None) + if profile.id == profile_id), None) if not cached_profile: # profile is not in cache; it must not exist logger.warning('Support Profile %s expected in profile_cache but not found', @@ -123,7 +122,6 @@ def move_to_existing(self, profile_id: str) -> bool: new_filepath = os.path.join(self._new_dir, f'{profile_id}.json') if not os.path.exists(new_filepath): - # TODO: why is this thrown when requesting /profiles?status=new ? 
logger.warning('Attempt to mark as "existing" profile that is not found: %s', new_filepath) return False @@ -161,7 +159,7 @@ def delete(self, profile_id: str) -> bool: # drop profile from cache self.profile_cache = [cached_profile for cached_profile in self.profile_cache - if cached_profile.data['id'] != profile_id] + if cached_profile.id != profile_id] return True def _load_existing_profiles(self) -> list[dict]: From 4b73ec570931eb95dc1bc5bace1a42dcb82ca8a0 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 12:46:37 -0700 Subject: [PATCH 08/24] drop ims_service from setup.py, update docker/local/Dockerfile --- docker/mockims/dev/Dockerfile | 2 +- docker/mockims/local/Dockerfile | 19 ++++++++++--------- python/setup.py | 1 - 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docker/mockims/dev/Dockerfile b/docker/mockims/dev/Dockerfile index 3113dc6..b8fe1fd 100644 --- a/docker/mockims/dev/Dockerfile +++ b/docker/mockims/dev/Dockerfile @@ -1,4 +1,4 @@ -# IMS Gateway Request Service using python commons base image +# NWSC Dummy Service using python commons base image FROM ghcr.io/noaa-gsl/idss/commons/python/python-base:main ARG maintainer diff --git a/docker/mockims/local/Dockerfile b/docker/mockims/local/Dockerfile index b9bcf07..7e1f4aa 100644 --- a/docker/mockims/local/Dockerfile +++ b/docker/mockims/local/Dockerfile @@ -1,5 +1,4 @@ -# IMS Gateway Request Service using python sci base image -# sci is required because the use of shapely +# NWSC Dummy Service using python commons base image FROM idss.engine.commons.python-base:local ARG maintainer @@ -7,17 +6,19 @@ LABEL maintainer ${maintainer} # Install additional dependencies #RUN conda config --add channels conda-forge && \ -# conda install -y =<1.0.0> +# conda install -y flask=2.3.2 -WORKDIR /python/ims_service +WORKDIR /python/nwsc_dummy_service # Copy source files -COPY ./python/idsse/testing/ims_service/src/ims_service.py /python/ims_service/ +COPY 
./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ -# (TEMPORARY) Copy canned criteria files. To be removed when integration with IMS API exists -COPY ./python/idsse/testing/ims_service/profiles/*.json /python/profiles/ +# (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists +COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ -# If you need to have a local mount, otherwise service will use version controlled jsons +# The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria +# then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment +# for now. #VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/ims_service/ims_service.py" ] \ No newline at end of file +ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/nwsc_dummy_web_service.py", "--base_dir", "/python/profiles"] diff --git a/python/setup.py b/python/setup.py index 11cad14..ed24afc 100644 --- a/python/setup.py +++ b/python/setup.py @@ -29,7 +29,6 @@ def package_files(directory): 'idsse.testing.idsse_common', 'idsse.testing.ims_request', 'idsse.testing.ims_response', - 'idsse.testing.ims_service', 'idsse.testing.nwsc_gateway', 'idsse.testing.risk_processor', 'idsse.testing.risk_processor.binghamton', From caa722c62fc5eae0aaf59b4964c137e96b3b6038 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 16:00:01 -0700 Subject: [PATCH 09/24] add test_profile_store unit tests --- .../nwsc_dummy_service/src/profile_store.py | 45 ++++-- python/nwsc_dummy_service/test/__init__.py | 3 + .../test/nwsc_dummy_integration_test.py | 11 ++ .../test/test_profile_store.py | 145 +++++++++++++++++- 4 files changed, 188 insertions(+), 16 deletions(-) create mode 100644 python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py diff --git a/python/nwsc_dummy_service/src/profile_store.py 
b/python/nwsc_dummy_service/src/profile_store.py index a389b5b..feceaa9 100644 --- a/python/nwsc_dummy_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -48,8 +48,7 @@ def __init__(self, base_dir: str): # ensure that base directory and all expected subdirectories exist for _dir in [self._base_dir, self._new_dir, self._existing_dir]: - if not os.path.exists(_dir): - os.mkdir(_dir) + os.makedirs(_dir, exist_ok=True) # load any NWS Connect response files dumped into the base_dir for response_filename in glob('*.json', root_dir=self._base_dir): @@ -68,9 +67,13 @@ def __init__(self, base_dir: str): with open(profile_filepath, 'w', encoding='utf-8') as outfile: json.dump(profile, outfile) - # cache of JSON data of all Support Profiles, marked as new vs. existing Profiles - self.profile_cache = [CachedProfile(profile, is_new=False) - for profile in self._load_existing_profiles()] + # populate cache of JSON data of all Support Profiles, marked as new vs. existing + existing_profiles = [CachedProfile(profile, is_new=False) + for profile in self._load_profiles_from_filesystem(self._existing_dir)] + new_profiles = [CachedProfile(profile, is_new=True) + for profile in self._load_profiles_from_filesystem(self._new_dir)] + + self.profile_cache = existing_profiles + new_profiles def get_all(self, filter_new_profiles = False) -> list[dict]: """Get all Support Profile JSONs persisted in this API, filtering by status='new' @@ -79,8 +82,8 @@ def get_all(self, filter_new_profiles = False) -> list[dict]: Args: filter_new_profiles (bool): if True, get only Support Profiles that have never been - returned to IDSS Engine on previous requests (never processed). Default is False - (return all existing profiles). + returned to IDSS Engine on previous requests (never processed). Default is False: + return all existing profiles. 
""" return [cached_profile.data for cached_profile in self.profile_cache if cached_profile.is_new == filter_new_profiles] @@ -92,6 +95,14 @@ def save(self, profile: dict) -> str | None: str | None: UUID of saved Support Profile on success, otherwise None """ logger.debug('Now saving new profile: %s', profile) + + # if profile ID is already in the cache, reject this save + existing_profile = next(((cached_obj for cached_obj in self.profile_cache + if cached_obj.id == profile.get('id'))), None) + if existing_profile: + logger.warning('Cannot save profile; already exists %s', existing_profile.id) + return None + cached_profile = CachedProfile(profile, is_new=True) # save Profile JSON to filesystem @@ -162,18 +173,22 @@ def delete(self, profile_id: str) -> bool: if cached_profile.id != profile_id] return True - def _load_existing_profiles(self) -> list[dict]: - """Read all JSON files from this ProfileStore's `existing` subdirectory""" - logger.info('Loading existing Support Profiles from path: %s', self._existing_dir) + def _load_profiles_from_filesystem(self, dir_: str) -> list[dict]: + """Read all JSON files from one of this ProfileStore's subdirectories, and return list of + the discovered files' json data. 
+ + Args: + dir_ (str): path to scan for Support Profile or NWS Connect API response JSON files + """ + logger.info('Loading Support Profiles JSON files from path: %s', dir_) profile_list: list[dict] = [] - for filename in glob('*.json', root_dir=self._existing_dir): - with open(os.path.join(self._existing_dir, filename), 'r', encoding='utf-8') as file: + for filename in glob('*.json', root_dir=dir_): + with open(os.path.join(dir_, filename), 'r', encoding='utf-8') as file: json_data: dict = json.load(file) - # if this is a pure NWS Connect response, profile will be nested inside `profiles` + # if this is a pure NWS Connect response, profile data is nested inside `profiles` if profiles := json_data.get('profiles', None) and isinstance(profiles, list): - for profile in profiles: - profile_list.append(profile) + profile_list.extend(profiles) else: # this file is assumed to be just a Support Profile profile_list.append(json_data) diff --git a/python/nwsc_dummy_service/test/__init__.py b/python/nwsc_dummy_service/test/__init__.py index e69de29..a8379f2 100644 --- a/python/nwsc_dummy_service/test/__init__.py +++ b/python/nwsc_dummy_service/test/__init__.py @@ -0,0 +1,3 @@ +"""Add nwsc_dummy_service top-level module to sys.path""" +import sys +sys.path.append('..') diff --git a/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py b/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py new file mode 100644 index 0000000..8bdb24e --- /dev/null +++ b/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py @@ -0,0 +1,11 @@ +"""Integration test for NWS Connect Dummy Web Service""" +# ---------------------------------------------------------------------------------- +# Created on Wed Dec 18 2024 +# +# Copyright (c) 2024 Colorado State University. All rights reserved. 
(1) +# +# Contributors: +# Mackenzie Grimes (1) +# +# ---------------------------------------------------------------------------------- +pass diff --git a/python/nwsc_dummy_service/test/test_profile_store.py b/python/nwsc_dummy_service/test/test_profile_store.py index 9edd23a..dc450a8 100644 --- a/python/nwsc_dummy_service/test/test_profile_store.py +++ b/python/nwsc_dummy_service/test/test_profile_store.py @@ -1,2 +1,145 @@ """Tests for src/profile_store.py""" -pass +# ---------------------------------------------------------------------------------- +# Created on Wed Dec 18 2024 +# +# Copyright (c) 2024 Colorado State University. All rights reserved. (1) +# +# Contributors: +# Mackenzie Grimes (1) +# +# ---------------------------------------------------------------------------------- +# pylint: disable=missing-function-docstring,redefined-outer-name +import json +import os +import shutil +from copy import deepcopy +from glob import glob + +from pytest import fail, fixture + +from nwsc_dummy_service.src.profile_store import (CachedProfile, ProfileStore, + NEW_SUBDIR, EXISTING_SUBDIR) + +# constants +STORE_BASE_DIR = os.path.join(os.path.dirname(__file__), 'temp') +RAW_JSON_PATH = os.path.join(os.path.dirname(__file__), '..', 'src', 'profiles') + +EXAMPLE_UUID = '9835b194-74de-4321-aa6b-d769972dc7cb' + +with open(os.path.join(RAW_JSON_PATH, 'nwsc_test_response_1.json'), 'r', encoding='utf-8') as f: + EXAMPLE_SUPPORT_PROFILE: dict = json.load(f) + + +def _empty_directory(dir_path: str): + for filename in os.listdir(dir_path): + filepath = os.path.join(dir_path, filename) + if os.path.isdir(filepath): + if len(os.listdir(filepath)) > 0: + _empty_directory(filepath) # recursively delete child directories + os.rmdir(filepath) + else: + os.remove(filepath) + + +# fixtures +def startup(): + """Runs before each test is executed. 
Create test resource file structure""" + os.makedirs(STORE_BASE_DIR, exist_ok=True) + _empty_directory(STORE_BASE_DIR) # delete any existing files/directories + + # copy all JSON files from ../src/profiles/ to the ProfileStore's base dir + for response_file in glob('*.json', root_dir=RAW_JSON_PATH): + shutil.copy(os.path.join(RAW_JSON_PATH, response_file), STORE_BASE_DIR) + + +def teardown(): + """Clean up any files/directories created during test""" + _empty_directory(STORE_BASE_DIR) + os.rmdir(STORE_BASE_DIR) + + +@fixture(autouse=True) +def startup_and_teardown(): + startup() + yield # run test + teardown() + + +@fixture +def store(): + return ProfileStore(STORE_BASE_DIR) + +# tests +def test_profile_store_loads_api_responses(store: ProfileStore): + assert sorted([c.id for c in store.profile_cache]) == [ + 'a08370c6-ab87-4808-bd51-a8597e58410d', + 'e1033860-f198-4c6a-a91b-beaec905132f', + 'fd35adec-d2a0-49a9-a320-df20a7b6d681', + ] + + for cache_obj in store.profile_cache: + # should have loaded all profiles as status "existing", file should exist in that subdir + assert not cache_obj.is_new + filepath = os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f'{cache_obj.id}.json') + assert os.path.exists(filepath) + + # new directory should be empty to begin with + assert os.listdir(os.path.join(STORE_BASE_DIR, NEW_SUBDIR)) == [] + + +def test_get_all_profiles(store: ProfileStore): + result = store.get_all() + assert len(result) == 3 + + result = store.get_all(filter_new_profiles=True) + assert len(result) == 0 + + +def test_save_adds_to_new_profiles(store: ProfileStore): + new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE) + new_profile['id'] = EXAMPLE_UUID + + new_profile_id = store.save(new_profile) + + assert new_profile_id == EXAMPLE_UUID + # profile should now be returned by get() request for new profiles + new_profile_list = store.get_all(filter_new_profiles=True) + assert [p.get('id') for p in new_profile_list] == [EXAMPLE_UUID] + + # profile should not be returned 
by get() request for existing profiles + existing_profile_list = store.get_all() + assert EXAMPLE_UUID not in [p.get('id') for p in existing_profile_list] + + # file should exist in the "new" subdirectory + assert os.path.exists(os.path.join(STORE_BASE_DIR, NEW_SUBDIR, f'{new_profile_id}.json')) + + +def test_move_to_existing_success(store: ProfileStore): + new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE) + new_profile['id'] = EXAMPLE_UUID + store.save(new_profile) + + new_profiles = store.get_all(filter_new_profiles=True) + assert [p['id'] for p in new_profiles] == [EXAMPLE_UUID] + + store.move_to_existing(EXAMPLE_UUID) + + new_profiles = store.get_all(filter_new_profiles=True) + assert new_profiles == [] # Support Profile has vanished from list of new + existing_profiles = store.get_all() + assert EXAMPLE_UUID in [p['id'] for p in existing_profiles] # now Profile is in existing list + + +def test_store_loads_jsons_from_new(store: ProfileStore): + # create a pre-existing "new" profile as well as the 3 "existing" profiles + profile = deepcopy(store.get_all()[0]) + profile['id'] = EXAMPLE_UUID # give copied profile a unique identifier + store.save(profile) + + # simulate starting ProfileStore process fresh, with existing JSONs on filesystem + _new_store = ProfileStore(STORE_BASE_DIR) + + # newly creatd ProfileStore should have correctly loaded and labeled "new" Profile + new_profile_list = _new_store.get_all(filter_new_profiles=True) + assert len(new_profile_list) == 1 + assert len(_new_store.profile_cache) == 4 # 3 existing, 1 new From 7fd21aca0406e3f4b208f68900032632c82caf46 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 16:00:42 -0700 Subject: [PATCH 10/24] rename nwsc_dummy_web_service to ncd_web_service --- docker/mockims/dev/Dockerfile | 2 +- docker/mockims/local/Dockerfile | 2 +- python/nwsc_dummy_service/README.md | 6 +++--- ..._dummy_web_service.py => ncd_web_service.py} | 17 +++++++++++++---- 4 files changed, 18 
insertions(+), 9 deletions(-) rename python/nwsc_dummy_service/{nwsc_dummy_web_service.py => ncd_web_service.py} (87%) diff --git a/docker/mockims/dev/Dockerfile b/docker/mockims/dev/Dockerfile index b8fe1fd..ad2451f 100644 --- a/docker/mockims/dev/Dockerfile +++ b/docker/mockims/dev/Dockerfile @@ -21,4 +21,4 @@ COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ # for now. #VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/nwsc_dummy_web_service.py", "--base_dir", "/python/profiles"] +ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/ncd_web_service.py", "--base_dir", "/python/profiles"] diff --git a/docker/mockims/local/Dockerfile b/docker/mockims/local/Dockerfile index 7e1f4aa..6f87305 100644 --- a/docker/mockims/local/Dockerfile +++ b/docker/mockims/local/Dockerfile @@ -21,4 +21,4 @@ COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ # for now. #VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/nwsc_dummy_web_service.py", "--base_dir", "/python/profiles"] +ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/ncd_web_service.py", "--base_dir", "/python/profiles"] diff --git a/python/nwsc_dummy_service/README.md b/python/nwsc_dummy_service/README.md index 9c48d49..2c52cf8 100644 --- a/python/nwsc_dummy_service/README.md +++ b/python/nwsc_dummy_service/README.md @@ -1,7 +1,7 @@ # NWS Connect Proxy Service ## Overview --The `proxy-service` is dummy NWS Connect service that simulates storing a set of Support Profiles and serving them up in a simple REST interface. +-The `nwsc-dummy-service` is a web service that simulates storing a set of Support Profiles and serving them up in a simple REST interface. ## Configurations The NWS Connect proxy service should be started as a standalone service and offers two end-points in support of the NWSConnect Gateway request/response services. 
Those services should be provided with the network address of this service's endpoints via their command line arguments for testing purposes. @@ -71,7 +71,7 @@ The most common way to get python dependencies installed is to use either [conda brew install peak/tap/s5cmd ``` -Lastly, `cd` to the `./python` directory, and start the relevant service. For example, for Request Service: +Lastly, `cd` to the `./python/nwsc_dummy_service` directory, and start the NWS Connect Dummy service: ```sh -python3 nwsc_dummy_web_service.py +python3 ncd_web_service.py --base_dir /path/to/some/dir ``` diff --git a/python/nwsc_dummy_service/nwsc_dummy_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py similarity index 87% rename from python/nwsc_dummy_service/nwsc_dummy_web_service.py rename to python/nwsc_dummy_service/ncd_web_service.py index e468351..ae49797 100644 --- a/python/nwsc_dummy_service/nwsc_dummy_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -1,4 +1,4 @@ -"""Dummy web service simulating behaviors of NWS Connect core services""" +"""NWS Connect Dummy service simulating behaviors of NWS Connect core services""" # ---------------------------------------------------------------------------------- # Created on Fri Apr 07 2023 # @@ -49,6 +49,7 @@ class EventsRoute: def __init__(self, base_dir: str): self.profile_store = ProfileStore(base_dir) + # pylint: disable=too-many-return-statements def handler(self): """Logic for requests to /all-events""" # check that this request has proper key to get or add data @@ -58,12 +59,18 @@ if request.method == 'POST': # request is saving new Support Profile event request_body: dict = request.json - profile_id = self.profile_store.save(request_body) # TODO: handle failure? 
+ profile_id = self.profile_store.save(request_body) + if not profile_id: + return jsonify({'message': f'Profile {request_body.get("id")} already exists'} + ), 400 + return jsonify({'message': f'Profile {profile_id} saved'}), 201 if request.method == 'DELETE': profile_id = request.args.get('uuid', default=None, type=str) - self.profile_store.delete(profile_id) # TODO: handle failure? + is_deleted = self.profile_store.delete(profile_id) + if not is_deleted: + return jsonify({'message': f'Profile {profile_id} not found'}), 404 return jsonify({'message': f'Profile {profile_id} deleted'}), 204 # otherwise, must be 'GET' operation @@ -85,7 +92,9 @@ def handler(self): else: # status query param should have been 'existing' or 'new' - return jsonify({'message': f'Invalid profile status: {profile_status}'}), 400 + return jsonify( + {'profiles': [], 'errors': [f'Invalid profile status: {profile_status}']} + ), 400 return jsonify({'profiles': profiles, 'errors': []}), 200 From 70499772870e96ed147886191e4ad1ce14242bc1 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 16:16:29 -0700 Subject: [PATCH 11/24] Add unit test GitHub Action, add dummy service to linter --- .github/workflows/linter.yml | 7 +++++ .github/workflows/run-tests.yml | 56 +++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 .github/workflows/run-tests.yml diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index b0dd658..1d9fcb0 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -43,3 +43,10 @@ jobs: - name: Run code linter run: pylint ./python/idsse/testing --max-line-length=120 --recursive true + + - name: Set PYTHONPATH for dummy service + run: | + echo "PYTHONPATH=python/nwsc_dummy_service" >> $GITHUB_ENV + + - name: Run code linter for dummy service + run: pylint ./python/nwsc_dummy_service --max-line-length=120 --recursive true diff --git a/.github/workflows/run-tests.yml 
b/.github/workflows/run-tests.yml new file mode 100644 index 0000000..2a635a1 --- /dev/null +++ b/.github/workflows/run-tests.yml @@ -0,0 +1,56 @@ +name: Run Pytest + +on: + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + defaults: + run: + shell: bash -el {0} + strategy: + matrix: + python-version: [ "3.11" ] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install pytest pytest-cov pylint==2.17.5 python-dateutil==2.8.2 + + - name: Checkout idss-engine-commons + uses: actions/checkout@v2 + with: + repository: NOAA-GSL/idss-engine-commons + ref: main + path: commons/ + + - name: Install IDSSE python commons + working-directory: commons/python/idsse_common + run: pip install . + + - name: Set PYTHONPATH for pytest + run: | + echo "PYTHONPATH=python/nwsc_dummy_service" >> $GITHUB_ENV + + - name: Test pytest + working-directory: python/nwsc_dummy_service/test + run: | + set -o pipefail; # exit immediately if pytest fails (tee obfuscates the exit code) + pytest --cov=.. 
--cov-report=term --junitxml=./pytest.xml | tee ./coverage.txt; + + - name: Pytest coverage comment + if: ${{ github.ref == 'refs/heads/main' }} + id: coverageComment + uses: MishaKav/pytest-coverage-comment@main + with: + hide-comment: true + badge-title: Coverage + title: Report + pytest-coverage-path: python/nwsc_dummy_service/test/coverage.txt From e79aa312c26df105f7142cd1b3531d3897292470 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 16:32:37 -0700 Subject: [PATCH 12/24] improve test coverage, fix unit test import --- .../test/test_profile_store.py | 63 ++++++++++++++----- 1 file changed, 48 insertions(+), 15 deletions(-) diff --git a/python/nwsc_dummy_service/test/test_profile_store.py b/python/nwsc_dummy_service/test/test_profile_store.py index dc450a8..0be16c1 100644 --- a/python/nwsc_dummy_service/test/test_profile_store.py +++ b/python/nwsc_dummy_service/test/test_profile_store.py @@ -15,10 +15,9 @@ from copy import deepcopy from glob import glob -from pytest import fail, fixture +from pytest import fixture -from nwsc_dummy_service.src.profile_store import (CachedProfile, ProfileStore, - NEW_SUBDIR, EXISTING_SUBDIR) +from python.nwsc_dummy_service.src.profile_store import ProfileStore, NEW_SUBDIR, EXISTING_SUBDIR # constants STORE_BASE_DIR = os.path.join(os.path.dirname(__file__), 'temp') @@ -27,7 +26,7 @@ EXAMPLE_UUID = '9835b194-74de-4321-aa6b-d769972dc7cb' with open(os.path.join(RAW_JSON_PATH, 'nwsc_test_response_1.json'), 'r', encoding='utf-8') as f: - EXAMPLE_SUPPORT_PROFILE: dict = json.load(f) + EXAMPLE_SUPPORT_PROFILE: dict = json.load(f)['profiles'][0] def _empty_directory(dir_path: str): @@ -69,6 +68,7 @@ def startup_and_teardown(): def store(): return ProfileStore(STORE_BASE_DIR) + # tests def test_profile_store_loads_api_responses(store: ProfileStore): assert sorted([c.id for c in store.profile_cache]) == [ @@ -87,6 +87,21 @@ def test_profile_store_loads_api_responses(store: ProfileStore): assert 
os.listdir(os.path.join(STORE_BASE_DIR, NEW_SUBDIR)) == [] +def test_store_loads_jsons_from_new(store: ProfileStore): + # create a pre-existing "new" profile as well as the 3 "existing" profiles + profile = deepcopy(store.get_all()[0]) + profile['id'] = EXAMPLE_UUID # give copied profile a unique identifier + store.save(profile) + + # simulate starting ProfileStore process fresh, with existing JSONs on filesystem + _new_store = ProfileStore(STORE_BASE_DIR) + + # newly creatd ProfileStore should have correctly loaded and labeled "new" Profile + new_profile_list = _new_store.get_all(filter_new_profiles=True) + assert len(new_profile_list) == 1 + assert len(_new_store.profile_cache) == 4 # 3 existing, 1 new + + def test_get_all_profiles(store: ProfileStore): result = store.get_all() assert len(result) == 3 @@ -114,6 +129,19 @@ def test_save_adds_to_new_profiles(store: ProfileStore): assert os.path.exists(os.path.join(STORE_BASE_DIR, NEW_SUBDIR, f'{new_profile_id}.json')) +def test_save_rejects_existing_profile(store: ProfileStore): + new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE) # use Support Profile that already exists + + new_profile_id = store.save(new_profile) + + assert not new_profile_id + # no new profile should have been added + new_profile_list = store.get_all(filter_new_profiles=True) + assert new_profile_list == [] + # file should not exist in the "new" subdirectory + assert not os.path.exists(os.path.join(STORE_BASE_DIR, NEW_SUBDIR, f'{new_profile["id"]}.json')) + + def test_move_to_existing_success(store: ProfileStore): new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE) new_profile['id'] = EXAMPLE_UUID @@ -130,16 +158,21 @@ def test_move_to_existing_success(store: ProfileStore): assert EXAMPLE_UUID in [p['id'] for p in existing_profiles] # now Profile is in existing list -def test_store_loads_jsons_from_new(store: ProfileStore): - # create a pre-existing "new" profile as well as the 3 "existing" profiles - profile = deepcopy(store.get_all()[0]) - 
profile['id'] = EXAMPLE_UUID # give copied profile a unique identifier - store.save(profile) +def test_delete_profile(store: ProfileStore): + existing_profile_list = store.get_all() + profile_id = existing_profile_list[0]['id'] - # simulate starting ProfileStore process fresh, with existing JSONs on filesystem - _new_store = ProfileStore(STORE_BASE_DIR) + success = store.delete(profile_id) - # newly creatd ProfileStore should have correctly loaded and labeled "new" Profile - new_profile_list = _new_store.get_all(filter_new_profiles=True) - assert len(new_profile_list) == 1 - assert len(_new_store.profile_cache) == 4 # 3 existing, 1 new + # after delete, profile should not be returned to get() request, and JSON file should be gone + assert success + existing_profile_list = store.get_all() + assert profile_id not in [p['id'] for p in existing_profile_list] + assert not os.path.exists(os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f'{profile_id}.json')) + + +def test_delete_profile_failure(store: ProfileStore): + profile_id = '11111111-2222-3333-444444444444' # fake ID does not exist in ProfileStore + + success = store.delete(profile_id) + assert not success From c2470fedd5cbe0b8bce0a9b48ae4dc8963292ebf Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 16:35:09 -0700 Subject: [PATCH 13/24] ignore pylint warning about "id" variable --- python/nwsc_dummy_service/src/profile_store.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/nwsc_dummy_service/src/profile_store.py b/python/nwsc_dummy_service/src/profile_store.py index feceaa9..e45be2b 100644 --- a/python/nwsc_dummy_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -30,6 +30,7 @@ class CachedProfile: data (dict): full JSON data of this Support Profile is_new (bool): track if Support Profile has ever been processed. 
Ought to start as True """ + # pylint: disable=invalid-name data: dict is_new: bool From f9a9def213e68646451686dc5f73c6fd05e4cc8b Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 17:32:47 -0700 Subject: [PATCH 14/24] increase test_ncd_web_service.py unit test coverage --- python/nwsc_dummy_service/ncd_web_service.py | 4 +- python/nwsc_dummy_service/test/__init__.py | 3 +- .../test/nwsc_dummy_integration_test.py | 11 - .../test/test_ncd_web_service.py | 196 ++++++++++++++++++ 4 files changed, 200 insertions(+), 14 deletions(-) delete mode 100644 python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py create mode 100644 python/nwsc_dummy_service/test/test_ncd_web_service.py diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py index ae49797..b79bab6 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -104,7 +104,7 @@ class AppWrapper: """Web server class wrapping Flask operations""" def __init__(self, base_dir: str): """Build Flask app instance, mapping handler to each endpoint""" - self.app = Flask(__name__) + self.app = Flask(__name__, static_folder=None) # no need for a static folder health_route = HealthRoute() events_route = EventsRoute(base_dir) @@ -126,7 +126,7 @@ def create_app(args: Namespace = None) -> Flask: return AppWrapper(base_dir).app -if __name__ == '__main__': +if __name__ == '__main__': # pragma: no cover parser = ArgumentParser() parser.add_argument('--port', dest='port', default=5000, type=int, help='The port the web server will listen on.') diff --git a/python/nwsc_dummy_service/test/__init__.py b/python/nwsc_dummy_service/test/__init__.py index a8379f2..1e12c5a 100644 --- a/python/nwsc_dummy_service/test/__init__.py +++ b/python/nwsc_dummy_service/test/__init__.py @@ -1,3 +1,4 @@ """Add nwsc_dummy_service top-level module to sys.path""" +import os import sys -sys.path.append('..') 
+sys.path.append(os.path.join(os.path.dirname(__file__), '..')) diff --git a/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py b/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py deleted file mode 100644 index 8bdb24e..0000000 --- a/python/nwsc_dummy_service/test/nwsc_dummy_integration_test.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Integration test for NWS Connect Dummy Web Service""" -# ---------------------------------------------------------------------------------- -# Created on Wed Dec 18 2024 -# -# Copyright (c) 2024 Colorado State University. All rights reserved. (1) -# -# Contributors: -# Mackenzie Grimes (1) -# -# ---------------------------------------------------------------------------------- -pass diff --git a/python/nwsc_dummy_service/test/test_ncd_web_service.py b/python/nwsc_dummy_service/test/test_ncd_web_service.py new file mode 100644 index 0000000..e6d720c --- /dev/null +++ b/python/nwsc_dummy_service/test/test_ncd_web_service.py @@ -0,0 +1,196 @@ +"""Unit tests for ncd_web_service.py""" +# ---------------------------------------------------------------------------------- +# Created on Wed Dec 18 2024 +# +# Copyright (c) 2024 Colorado State University. All rights reserved. 
(1) +# +# Contributors: +# Mackenzie Grimes (1) +# +# ---------------------------------------------------------------------------------- +import json +from datetime import timedelta +from unittest.mock import Mock + +from flask import Request, Response +from pytest import fixture, MonkeyPatch +from werkzeug.datastructures import MultiDict + +from python.nwsc_dummy_service.ncd_web_service import (AppWrapper, Flask, Namespace, ProfileStore, + create_app, datetime, GSL_KEY) + +# constants +EXAMPLE_DATETIME = datetime(2024, 1, 1, 12, 34) +EXAMPLE_UUID = '9835b194-74de-4321-aa6b-d769972dc7cb' + + +# fixtures +@fixture +def mock_datetime(monkeypatch: MonkeyPatch) -> Mock: + mock_obj = Mock(name='MockDatetime') + mock_obj.now.return_value = EXAMPLE_DATETIME + monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.datetime', mock_obj) + + return mock_obj + + +@fixture +def mock_profile_store(monkeypatch: MonkeyPatch) -> Mock: + mock_obj = Mock(name='MockProfileStore', spec=ProfileStore) + monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.ProfileStore', mock_obj) + return mock_obj + + +@fixture +def mock_jsonify(monkeypatch: MonkeyPatch) -> Mock: + def mock_func(*args, **_kwargs): + return Response(bytes(json.dumps(args[0]), 'utf-8'), content_type='application/json') + + mock_obj = Mock(name='MockJsonify') + mock_obj.side_effect = mock_func + monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.jsonify', mock_obj) + return mock_obj + + +@fixture +def mock_current_app(monkeypatch: MonkeyPatch) -> Mock: + mock_obj = Mock(name='MockCurrentApp', spec=Flask) + mock_obj.logger.info.return_value = None + mock_obj.logger.error.return_value = None + monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.current_app', mock_obj) + return mock_obj + + +@fixture +def mock_request(monkeypatch: MonkeyPatch, mock_current_app, mock_jsonify) -> Mock: + mock_obj = Mock(name='MockFlaskRequest', spec=Request) + mock_obj.origin = 'http://example.com:5000' + 
mock_obj.method = 'GET' + mock_obj.headers = MultiDict({'X-Api-Key': GSL_KEY}) + monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.request', mock_obj) + return mock_obj + + +@fixture +def wrapper(mock_profile_store, mock_datetime, mock_request) -> AppWrapper: + return AppWrapper('/fake/base/dir') + + +def test_create_app(mock_profile_store: Mock): + args = Namespace() + args.base_dir = '/fake/base/dir' + + _app = create_app(args) + + assert isinstance(_app, Flask) + endpoint_dict = _app.view_functions + assert sorted(list(endpoint_dict.keys())) == ['events', 'health'] + + +def test_health_route(wrapper: AppWrapper, mock_datetime: Mock): + # simulate that server has been running for 5 minutes + mock_datetime.now.return_value = EXAMPLE_DATETIME + timedelta(minutes=5) + + result: tuple[Response, int] = wrapper.app.view_functions['health']() + + response, status_code = result + assert status_code == 200 + assert response.json == { + 'startedAt': '2024-01-01T12:34:00.000Z', + 'uptime': 5 * 60 + } + +def test_events_bad_key(wrapper: AppWrapper, mock_request: Mock): + mock_request.headers = MultiDict({'X-Api-Key': 'A_BAD_KEY'}) + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert result[1] == 401 + + +def test_get_bad_data_source(wrapper: AppWrapper, mock_request: Mock): + mock_request.args = MultiDict({'dataSource': 'A BAD DATA SOURCE'}) + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert result[1] == 400 + + +def test_get_bad_status(wrapper: AppWrapper, mock_request: Mock): + mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'NOT REAL STATUS'}) + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + response, status_code = result + assert status_code == 400 + assert response.json == {'profiles': [], 'errors': ['Invalid profile status: NOT REAL STATUS']} + + +def test_get_existing_profiles(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): + 
mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'existing'}) + example_profile_list = [{'id': EXAMPLE_UUID, 'name': 'My Profile'}] + mock_profile_store.return_value.get_all.return_value = example_profile_list + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + response, status_code = result + assert status_code == 200 + assert response.json == {'profiles': example_profile_list, 'errors': []} + mock_profile_store.return_value.get_all.assert_called_with() # filter_new_profiles not set + + +def test_get_new_profiles(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): + mock_request.args = MultiDict({'dataSource': 'NBM', 'status': 'new'}) + example_profile = {'id': EXAMPLE_UUID, 'name': 'My Profile'} + mock_profile_store.return_value.get_all.return_value = [example_profile] + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + response, status_code = result + assert status_code == 200 + assert response.json == {'profiles': [example_profile], 'errors': []} + + func_call_args = mock_profile_store.return_value.get_all.mock_calls + assert func_call_args[0][2] == {'filter_new_profiles': True} # filter_new_profiles set to True + + +def test_create_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): + mock_request.method = 'POST' + mock_request.json = {'id': EXAMPLE_UUID, 'name': 'My Profile'} + mock_profile_store.return_value.save.return_value = EXAMPLE_UUID # save() success + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert result[1] == 201 + + +def test_create_previous_profile_failure(wrapper: AppWrapper, + mock_request: Mock, + mock_profile_store: Mock): + mock_request.method = 'POST' + mock_request.json = {'id': EXAMPLE_UUID, 'name': 'My Profile'} + mock_profile_store.return_value.save.return_value = None # save() rejected, profile must exist + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert 
result[1] == 400 + + +def test_delete_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): + mock_request.method = 'DELETE' + mock_request.args = MultiDict({'uuid': EXAMPLE_UUID}) + mock_profile_store.return_value.delete.return_value = True # delete worked + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert result[1] == 204 + + +def test_delete_profile_failure(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): + mock_request.method = 'DELETE' + mock_request.args = MultiDict({'uuid': EXAMPLE_UUID}) + mock_profile_store.return_value.delete.return_value = False # delete() rejected, profile must exist + + result: tuple[Response, int] = wrapper.app.view_functions['events']() + + assert result[1] == 404 From e97d3f0e3295e31b7067bb055d90ca99369bede5 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Wed, 18 Dec 2024 17:35:46 -0700 Subject: [PATCH 15/24] add flask to run-tests.yml, linter ignore pytest syntax --- .github/workflows/run-tests.yml | 2 +- python/nwsc_dummy_service/test/test_ncd_web_service.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 2a635a1..44d4fcd 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -22,7 +22,7 @@ jobs: - name: Install python dependencies run: | python -m pip install --upgrade pip - pip install pytest pytest-cov pylint==2.17.5 python-dateutil==2.8.2 + pip install pytest pytest-cov pylint==2.17.5 python-dateutil==2.8.2 flask==2.3.2 - name: Checkout idss-engine-commons uses: actions/checkout@v2 diff --git a/python/nwsc_dummy_service/test/test_ncd_web_service.py b/python/nwsc_dummy_service/test/test_ncd_web_service.py index e6d720c..a326798 100644 --- a/python/nwsc_dummy_service/test/test_ncd_web_service.py +++ b/python/nwsc_dummy_service/test/test_ncd_web_service.py @@ -8,6 +8,7 @@ # Mackenzie Grimes (1) # # 
---------------------------------------------------------------------------------- +# pylint: disable=missing-function-docstring,redefined-outer-name,unused-argument import json from datetime import timedelta from unittest.mock import Mock From b23926ffb66c3579ad7bc15262ace98215e9a9f0 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 11:52:08 -0700 Subject: [PATCH 16/24] store GSL_KEY in app.config --- python/nwsc_dummy_service/ncd_web_service.py | 3 +-- python/nwsc_dummy_service/test/test_ncd_web_service.py | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py index b79bab6..e94e81a 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -53,7 +53,7 @@ def __init__(self, base_dir: str): def handler(self): """Logic for requests to /all-events""" # check that this request has proper key to get or add data - if request.headers.get('X-Api-Key') != GSL_KEY: + if request.headers.get('X-Api-Key') != current_app.config['GSL_KEY']: return jsonify({'message': 'ERROR: Unauthorized'}), 401 if request.method == 'POST': @@ -99,7 +99,6 @@ def handler(self): return jsonify({'profiles': profiles, 'errors': []}), 200 - class AppWrapper: """Web server class wrapping Flask operations""" def __init__(self, base_dir: str): diff --git a/python/nwsc_dummy_service/test/test_ncd_web_service.py b/python/nwsc_dummy_service/test/test_ncd_web_service.py index a326798..ee69ed5 100644 --- a/python/nwsc_dummy_service/test/test_ncd_web_service.py +++ b/python/nwsc_dummy_service/test/test_ncd_web_service.py @@ -58,6 +58,7 @@ def mock_current_app(monkeypatch: MonkeyPatch) -> Mock: mock_obj = Mock(name='MockCurrentApp', spec=Flask) mock_obj.logger.info.return_value = None mock_obj.logger.error.return_value = None + mock_obj.config = MultiDict({'GSL_KEY': GSL_KEY}) 
monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.current_app', mock_obj) return mock_obj From 019330eaf9241b0d7f4080126f84d2bce95bf493 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 12:01:50 -0700 Subject: [PATCH 17/24] minor code cleanup --- python/nwsc_dummy_service/ncd_web_service.py | 2 +- python/nwsc_dummy_service/src/profile_store.py | 8 ++------ python/nwsc_dummy_service/test/test_ncd_web_service.py | 8 ++++++-- python/nwsc_dummy_service/test/test_profile_store.py | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py index e94e81a..597aca0 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -88,7 +88,7 @@ def handler(self): # they've now been returned to IDSS Engine clients at least once current_app.logger.info('Got all new profiles: %s', profiles) for profile in profiles: - self.profile_store.move_to_existing(profile['id']) + self.profile_store.mark_as_existing(profile['id']) else: # status query param should have been 'existing' or 'new' diff --git a/python/nwsc_dummy_service/src/profile_store.py b/python/nwsc_dummy_service/src/profile_store.py index e45be2b..a0eb4fd 100644 --- a/python/nwsc_dummy_service/src/profile_store.py +++ b/python/nwsc_dummy_service/src/profile_store.py @@ -21,7 +21,6 @@ logger = logging.getLogger(__name__) - @dataclass class CachedProfile: """Data class to hold Support Profile's data and metadata ("new" vs "existing" status) @@ -116,7 +115,7 @@ def save(self, profile: dict) -> str | None: self.profile_cache.append(cached_profile) return cached_profile.id - def move_to_existing(self, profile_id: str) -> bool: + def mark_as_existing(self, profile_id: str) -> bool: """Mark a formerly "new" Support Profile as "existing", a.k.a. 
has been returned in API response at least once and should no longer be processed as "new" @@ -141,10 +140,7 @@ def move_to_existing(self, profile_id: str) -> bool: # move the JSON file from the "new" to the "existing" directory and update cache existing_filepath = os.path.join(self._existing_dir, f'{profile_id}.json') os.rename(new_filepath, existing_filepath) - - # update this profile's is_new flag in in-memory cache - profile_index = self.profile_cache.index(cached_profile) - self.profile_cache[profile_index].is_new = False + cached_profile.is_new = False return True diff --git a/python/nwsc_dummy_service/test/test_ncd_web_service.py b/python/nwsc_dummy_service/test/test_ncd_web_service.py index ee69ed5..0542425 100644 --- a/python/nwsc_dummy_service/test/test_ncd_web_service.py +++ b/python/nwsc_dummy_service/test/test_ncd_web_service.py @@ -152,8 +152,12 @@ def test_get_new_profiles(wrapper: AppWrapper, mock_request: Mock, mock_profile_ assert status_code == 200 assert response.json == {'profiles': [example_profile], 'errors': []} - func_call_args = mock_profile_store.return_value.get_all.mock_calls - assert func_call_args[0][2] == {'filter_new_profiles': True} # filter_new_profiles set to True + get_call_args = mock_profile_store.return_value.get_all.mock_calls + assert get_call_args[0][2] == {'filter_new_profiles': True} # filter_new_profiles set to True + + # expect that we told ProfileStore to label this profile as not new + mark_existing_call_args = mock_profile_store.return_value.mark_as_existing.mock_calls + assert mark_existing_call_args[0][1][0] == example_profile['id'] def test_create_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock): diff --git a/python/nwsc_dummy_service/test/test_profile_store.py b/python/nwsc_dummy_service/test/test_profile_store.py index 0be16c1..f0c3a0f 100644 --- a/python/nwsc_dummy_service/test/test_profile_store.py +++ b/python/nwsc_dummy_service/test/test_profile_store.py @@ -150,7 +150,7 @@ 
def test_move_to_existing_success(store: ProfileStore): new_profiles = store.get_all(filter_new_profiles=True) assert [p['id'] for p in new_profiles] == [EXAMPLE_UUID] - store.move_to_existing(EXAMPLE_UUID) + store.mark_as_existing(EXAMPLE_UUID) new_profiles = store.get_all(filter_new_profiles=True) assert new_profiles == [] # Support Profile has vanished from list of new From 889e9e42c8efd973f46339bb476c15f6a5231b9d Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 12:16:42 -0700 Subject: [PATCH 18/24] allow dataSource 'ANY' --- python/nwsc_dummy_service/ncd_web_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py index 597aca0..12d0933 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -75,7 +75,7 @@ def handler(self): # otherwise, must be 'GET' operation data_source = request.args.get('dataSource', None, type=str) - if data_source != 'NBM': + if data_source not in ['NBM', 'ANY']: return jsonify({'profiles': [], 'errors': [f'Invalid dataSource: {data_source}']}), 400 profile_status = request.args.get('status', default='existing', type=str) From 0b12e6b66fbc133f876e40de6f24fe73d2c0988a Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 12:20:10 -0700 Subject: [PATCH 19/24] fix Dockerfile --- docker/mockims/dev/Dockerfile | 1 + python/nwsc_dummy_service/ncd_web_service.py | 1 + 2 files changed, 2 insertions(+) diff --git a/docker/mockims/dev/Dockerfile b/docker/mockims/dev/Dockerfile index ad2451f..81d6627 100644 --- a/docker/mockims/dev/Dockerfile +++ b/docker/mockims/dev/Dockerfile @@ -12,6 +12,7 @@ WORKDIR /python/nwsc_dummy_service # Copy source files COPY ./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ +COPY ./python/nwsc_dummy_service/src/*.py /python/nwsc_dummy_service/src/ # (TEMPORARY) Copy 
canned criteria files. To be removed when integration with NWS Connect API exists COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_dummy_service/ncd_web_service.py index 12d0933..3264b2e 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_dummy_service/ncd_web_service.py @@ -104,6 +104,7 @@ class AppWrapper: def __init__(self, base_dir: str): """Build Flask app instance, mapping handler to each endpoint""" self.app = Flask(__name__, static_folder=None) # no need for a static folder + self.app.config['GSL_KEY'] = GSL_KEY health_route = HealthRoute() events_route = EventsRoute(base_dir) From 175ae7199ca56a85a30615762a67153b76f2ed9d Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 12:21:12 -0700 Subject: [PATCH 20/24] move docker/mockims to docker/dummyservice --- .github/workflows/build-package-mock-ims.yml | 6 +++--- docker/{mockims => dummyservice}/dev/Dockerfile | 0 docker/{mockims => dummyservice}/local/Dockerfile | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) rename docker/{mockims => dummyservice}/dev/Dockerfile (100%) rename docker/{mockims => dummyservice}/local/Dockerfile (92%) diff --git a/.github/workflows/build-package-mock-ims.yml b/.github/workflows/build-package-mock-ims.yml index 24fa783..1015482 100644 --- a/.github/workflows/build-package-mock-ims.yml +++ b/.github/workflows/build-package-mock-ims.yml @@ -11,7 +11,7 @@ jobs: fail-fast: true matrix: app: - - mockims + - dummyservice steps: - name: Login to GitHub Container Registry @@ -20,7 +20,7 @@ jobs: registry: ghcr.io username: ${{github.actor}} password: ${{secrets.GITHUB_TOKEN}} - + - name: pull run: | docker pull ghcr.io/noaa-gsl/idss/commons/python/python-base:main @@ -69,7 +69,7 @@ jobs: --build-arg COMMITBRANCH=${{env.BRANCH}} \ --build-arg COMMITSHA=${{github.sha}} \ -t 
${{env.DEV_REGISTRY}}/${{env.APP_LOWERCASE}}:${{env.BRANCH}} \ - -f ./docker/mockims/dev/Dockerfile . + -f ./docker/dummyservice/dev/Dockerfile . - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master diff --git a/docker/mockims/dev/Dockerfile b/docker/dummyservice/dev/Dockerfile similarity index 100% rename from docker/mockims/dev/Dockerfile rename to docker/dummyservice/dev/Dockerfile diff --git a/docker/mockims/local/Dockerfile b/docker/dummyservice/local/Dockerfile similarity index 92% rename from docker/mockims/local/Dockerfile rename to docker/dummyservice/local/Dockerfile index 6f87305..29ecc8f 100644 --- a/docker/mockims/local/Dockerfile +++ b/docker/dummyservice/local/Dockerfile @@ -12,6 +12,7 @@ WORKDIR /python/nwsc_dummy_service # Copy source files COPY ./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ +COPY ./python/nwsc_dummy_service/src/*.py /python/nwsc_dummy_service/src/ # (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ From a80f35f5a399304cdc2060915b2607e4100cd6a5 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 13:57:06 -0700 Subject: [PATCH 21/24] update readme.md --- python/nwsc_dummy_service/README.md | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/python/nwsc_dummy_service/README.md b/python/nwsc_dummy_service/README.md index 2c52cf8..a048c11 100644 --- a/python/nwsc_dummy_service/README.md +++ b/python/nwsc_dummy_service/README.md @@ -40,9 +40,9 @@ To run this service can run in isolation, it does not requires a rabbitmq server docker run --rm --name proxy-service idss.engine.service.proxy.service:local ``` -Optional parameters include: +Required parameters include: ``` - None + --base_dir /path/to/file/dir # file location where JSON files will be read and written ``` #### Python (local) @@ -75,3 +75,18 @@ 
Lastly, `cd` to the `./python/nwsc_dummy_service` directory, and start the NWS C ```sh python3 ncd_web_service.py --base_dir /path/to/some/dir ``` + +On startup, the service creates 'existing' and 'new' subdirectories at the path location given by `--base_dir` if needed, then reads into its in-memory cache any existing JSON files in the base directory or either subdirectory. + +### Endpoints +- GET `/health` +- GET `/all-events?dataSource=ANY&status=existing` + - Get list of existing Support Profiles (not new). Will be formatted like `{ "profiles": [], "errors": []}` +- GET `/all-events?dataSource=ANY&status=new` + - Get only new (never before processed) Support Profiles. After a profile is returned to any API request, it will disappear from the "new" list, only appearing in `status=existing` filter requests. +- POST `/all-events` + - Create a new Support Profile to be stored by the API. The body of the request will be the JSON saved--the `id` field should be unique. +- DELETE `/all-events?uuid=` + - Permanently remove an existing Support Profile from the API. `uuid` must match one of the saved Support Profile JSON's `id` attribute, otherwise it will return `404`. + +Note that all requests to the `/all-events` endpoint require an `X-Api-Key` header that must match the approved key, or the API will return `401`. 
\ No newline at end of file From 8ab95f61bbd757f1a66575ad711ab4bfd2b3ad80 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 14:06:27 -0700 Subject: [PATCH 22/24] hide /test and __init__.py from test coverage --- python/nwsc_dummy_service/test/.coveragerc | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 python/nwsc_dummy_service/test/.coveragerc diff --git a/python/nwsc_dummy_service/test/.coveragerc b/python/nwsc_dummy_service/test/.coveragerc new file mode 100644 index 0000000..02afcac --- /dev/null +++ b/python/nwsc_dummy_service/test/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = */test_*,__init__.py From 1587839ce7eeffcf534160b4e6792fa61c3769d7 Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 14:37:49 -0700 Subject: [PATCH 23/24] rename service to to nwsc_proxy --- .github/workflows/build-package-mock-ims.yml | 4 ++-- .github/workflows/linter.yml | 8 ++++---- .github/workflows/run-tests.yml | 6 +++--- docker/dummyservice/dev/Dockerfile | 12 ++++++------ docker/dummyservice/local/Dockerfile | 12 ++++++------ .../{nwsc_dummy_service => nwsc_proxy}/README.md | 10 +++++----- .../__init__.py | 0 .../ncp_web_service.py} | 2 +- .../src/__init__.py | 0 .../src/profile_store.py | 0 .../src/profiles/nwsc_test_response_1.json | 0 .../src/profiles/nwsc_test_response_2.json | 0 .../src/profiles/nwsc_test_response_3.json | 0 .../test/.coveragerc | 0 .../test/__init__.py | 2 +- .../test/test_ncp_web_service.py} | 16 ++++++++-------- .../test/test_profile_store.py | 2 +- 17 files changed, 37 insertions(+), 37 deletions(-) rename python/{nwsc_dummy_service => nwsc_proxy}/README.md (88%) rename python/{nwsc_dummy_service => nwsc_proxy}/__init__.py (100%) rename python/{nwsc_dummy_service/ncd_web_service.py => nwsc_proxy/ncp_web_service.py} (98%) rename python/{nwsc_dummy_service => nwsc_proxy}/src/__init__.py (100%) rename python/{nwsc_dummy_service => nwsc_proxy}/src/profile_store.py (100%) rename 
python/{nwsc_dummy_service => nwsc_proxy}/src/profiles/nwsc_test_response_1.json (100%) rename python/{nwsc_dummy_service => nwsc_proxy}/src/profiles/nwsc_test_response_2.json (100%) rename python/{nwsc_dummy_service => nwsc_proxy}/src/profiles/nwsc_test_response_3.json (100%) rename python/{nwsc_dummy_service => nwsc_proxy}/test/.coveragerc (100%) rename python/{nwsc_dummy_service => nwsc_proxy}/test/__init__.py (59%) rename python/{nwsc_dummy_service/test/test_ncd_web_service.py => nwsc_proxy/test/test_ncp_web_service.py} (91%) rename python/{nwsc_dummy_service => nwsc_proxy}/test/test_profile_store.py (98%) diff --git a/.github/workflows/build-package-mock-ims.yml b/.github/workflows/build-package-mock-ims.yml index 1015482..9f03e15 100644 --- a/.github/workflows/build-package-mock-ims.yml +++ b/.github/workflows/build-package-mock-ims.yml @@ -11,7 +11,7 @@ jobs: fail-fast: true matrix: app: - - dummyservice + - proxy steps: - name: Login to GitHub Container Registry @@ -69,7 +69,7 @@ jobs: --build-arg COMMITBRANCH=${{env.BRANCH}} \ --build-arg COMMITSHA=${{github.sha}} \ -t ${{env.DEV_REGISTRY}}/${{env.APP_LOWERCASE}}:${{env.BRANCH}} \ - -f ./docker/dummyservice/dev/Dockerfile . + -f ./docker/nwsc_proxy/dev/Dockerfile . 
- name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 1d9fcb0..47537f7 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -44,9 +44,9 @@ jobs: - name: Run code linter run: pylint ./python/idsse/testing --max-line-length=120 --recursive true - - name: Set PYTHONPATH for dummy service + - name: Set PYTHONPATH for proxy service run: | - echo "PYTHONPATH=python/nwsc_dummy_service" >> $GITHUB_ENV + echo "PYTHONPATH=python/nwsc_proxy" >> $GITHUB_ENV - - name: Run code linter for dummy service - run: pylint ./python/nwsc_dummy_service --max-line-length=120 --recursive true + - name: Run code linter for proxy service + run: pylint ./python/nwsc_proxy --max-line-length=120 --recursive true diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 44d4fcd..548f27d 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -37,10 +37,10 @@ jobs: - name: Set PYTHONPATH for pytest run: | - echo "PYTHONPATH=python/nwsc_dummy_service" >> $GITHUB_ENV + echo "PYTHONPATH=python/nwsc_proxy" >> $GITHUB_ENV - name: Test pytest - working-directory: python/nwsc_dummy_service/test + working-directory: python/nwsc_proxy/test run: | set -o pipefail; # exit immediately if pytest fails (tee obfuscates the exit code) pytest --cov=.. 
--cov-report=term --junitxml=./pytest.xml | tee ./coverage.txt; @@ -53,4 +53,4 @@ jobs: hide-comment: true badge-title: Coverage title: Report - pytest-coverage-path: python/nwsc_dummy_service/test/coverage.txt + pytest-coverage-path: python/nwsc_proxy/test/coverage.txt diff --git a/docker/dummyservice/dev/Dockerfile b/docker/dummyservice/dev/Dockerfile index 81d6627..db0276d 100644 --- a/docker/dummyservice/dev/Dockerfile +++ b/docker/dummyservice/dev/Dockerfile @@ -1,4 +1,4 @@ -# NWSC Dummy Service using python commons base image +# NWSC Proxy Service using python commons base image FROM ghcr.io/noaa-gsl/idss/commons/python/python-base:main ARG maintainer @@ -8,18 +8,18 @@ LABEL maintainer ${maintainer} #RUN conda config --add channels conda-forge && \ # conda install -y flask=2.3.2 -WORKDIR /python/nwsc_dummy_service +WORKDIR /python/nwsc_proxy # Copy source files -COPY ./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ -COPY ./python/nwsc_dummy_service/src/*.py /python/nwsc_dummy_service/src/ +COPY ./python/nwsc_proxy/*.py /python/nwsc_proxy/ +COPY ./python/nwsc_proxy/src/*.py /python/nwsc_proxy/src/ # (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists -COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ +COPY ./python/nwsc_proxy/src/profiles/*.json /python/profiles/ # The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria # then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment # for now. 
#VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/ncd_web_service.py", "--base_dir", "/python/profiles"] +ENTRYPOINT [ "python3", "/python/nwsc_proxy/ncp_web_service.py", "--base_dir", "/python/profiles"] diff --git a/docker/dummyservice/local/Dockerfile b/docker/dummyservice/local/Dockerfile index 29ecc8f..6acf977 100644 --- a/docker/dummyservice/local/Dockerfile +++ b/docker/dummyservice/local/Dockerfile @@ -1,4 +1,4 @@ -# NWSC Dummy Service using python commons base image +# NWSC Proxy Service using python commons base image FROM idss.engine.commons.python-base:local ARG maintainer @@ -8,18 +8,18 @@ LABEL maintainer ${maintainer} #RUN conda config --add channels conda-forge && \ # conda install -y flask=2.3.2 -WORKDIR /python/nwsc_dummy_service +WORKDIR /python/nwsc_proxy # Copy source files -COPY ./python/nwsc_dummy_service/*.py /python/nwsc_dummy_service/ -COPY ./python/nwsc_dummy_service/src/*.py /python/nwsc_dummy_service/src/ +COPY ./python/nwsc_proxy/*.py /python/nwsc_proxy/ +COPY ./python/nwsc_proxy/src/*.py /python/nwsc_proxy/src/ # (TEMPORARY) Copy canned criteria files. To be removed when integration with NWS Connect API exists -COPY ./python/nwsc_dummy_service/src/profiles/*.json /python/profiles/ +COPY ./python/nwsc_proxy/src/profiles/*.json /python/profiles/ # The volume mapping here is kind of strange for k8s deployment, because if we map an empty volume to /criteria # then the temp copy of json above will get blown away by the volume mapping...just omit it for k8s deployment # for now. 
#VOLUME /python/profiles -ENTRYPOINT [ "python3", "/python/nwsc_dummy_service/ncd_web_service.py", "--base_dir", "/python/profiles"] +ENTRYPOINT [ "python3", "/python/nwsc_proxy/ncp_web_service.py", "--base_dir", "/python/profiles"] diff --git a/python/nwsc_dummy_service/README.md b/python/nwsc_proxy/README.md similarity index 88% rename from python/nwsc_dummy_service/README.md rename to python/nwsc_proxy/README.md index a048c11..6f6d618 100644 --- a/python/nwsc_dummy_service/README.md +++ b/python/nwsc_proxy/README.md @@ -1,10 +1,10 @@ # NWS Connect Proxy Service - ## Overview --The `nwsc-dummy-service` is a web service that simulates storing a set of Support Profiles and serving them up in a simple REST interface. +## Overview +The `nwsc-proxy` is a web service that simulates NWS Connect systems storing a set of Support Profiles and serving them up in a simple REST interface. ## Configurations -The NWS Connect proxy service should be started as a standalone service and offers two end-points in support of the NWSConnect Gateway request/response services. Those services should be provided with the network address of this services endpoints via their command line arguments for testing purposes. +The NWS Connect proxy service should be started as a standalone service and offers two end-points in support of the NWSConnect Gateway services. Those services should be provided with the network address of this service's endpoints via their command line arguments for testing purposes. 
## Build, Release, and Run @@ -71,9 +71,9 @@ The most common way to get python dependencies installed is to use either [conda brew install peak/tap/s5cmd ``` -Lastly, `cd` to the `./python/nwsc_dummy_service` directory, and start the NWS Connect Dummy service: +Lastly, `cd` to the `./python/nwsc_proxy` directory, and start the NWS Connect Proxy service: ```sh -python3 ncd_web_service.py --base_dir /path/to/some/dir +python3 ncp_web_service.py --base_dir /path/to/some/dir ``` On startup, the service creates 'existing' and 'new' subdirectories at the path location given by `--base_dir` if needed, then reads into its in-memory cache any existing JSON files in the base directory or either subdirectory. diff --git a/python/nwsc_dummy_service/__init__.py b/python/nwsc_proxy/__init__.py similarity index 100% rename from python/nwsc_dummy_service/__init__.py rename to python/nwsc_proxy/__init__.py diff --git a/python/nwsc_dummy_service/ncd_web_service.py b/python/nwsc_proxy/ncp_web_service.py similarity index 98% rename from python/nwsc_dummy_service/ncd_web_service.py rename to python/nwsc_proxy/ncp_web_service.py index 3264b2e..bb337ef 100644 --- a/python/nwsc_dummy_service/ncd_web_service.py +++ b/python/nwsc_proxy/ncp_web_service.py @@ -1,4 +1,4 @@ -"""NWS Connect Dummy service simulating behaviors of NWS Connect core services""" +"""NWS Connect Proxy service simulating behaviors of NWS Connect core services""" # ---------------------------------------------------------------------------------- # Created on Fri Apr 07 2023 # diff --git a/python/nwsc_dummy_service/src/__init__.py b/python/nwsc_proxy/src/__init__.py similarity index 100% rename from python/nwsc_dummy_service/src/__init__.py rename to python/nwsc_proxy/src/__init__.py diff --git a/python/nwsc_dummy_service/src/profile_store.py b/python/nwsc_proxy/src/profile_store.py similarity index 100% rename from python/nwsc_dummy_service/src/profile_store.py rename to python/nwsc_proxy/src/profile_store.py diff --git 
a/python/nwsc_dummy_service/src/profiles/nwsc_test_response_1.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_1.json similarity index 100% rename from python/nwsc_dummy_service/src/profiles/nwsc_test_response_1.json rename to python/nwsc_proxy/src/profiles/nwsc_test_response_1.json diff --git a/python/nwsc_dummy_service/src/profiles/nwsc_test_response_2.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_2.json similarity index 100% rename from python/nwsc_dummy_service/src/profiles/nwsc_test_response_2.json rename to python/nwsc_proxy/src/profiles/nwsc_test_response_2.json diff --git a/python/nwsc_dummy_service/src/profiles/nwsc_test_response_3.json b/python/nwsc_proxy/src/profiles/nwsc_test_response_3.json similarity index 100% rename from python/nwsc_dummy_service/src/profiles/nwsc_test_response_3.json rename to python/nwsc_proxy/src/profiles/nwsc_test_response_3.json diff --git a/python/nwsc_dummy_service/test/.coveragerc b/python/nwsc_proxy/test/.coveragerc similarity index 100% rename from python/nwsc_dummy_service/test/.coveragerc rename to python/nwsc_proxy/test/.coveragerc diff --git a/python/nwsc_dummy_service/test/__init__.py b/python/nwsc_proxy/test/__init__.py similarity index 59% rename from python/nwsc_dummy_service/test/__init__.py rename to python/nwsc_proxy/test/__init__.py index 1e12c5a..7072501 100644 --- a/python/nwsc_dummy_service/test/__init__.py +++ b/python/nwsc_proxy/test/__init__.py @@ -1,4 +1,4 @@ -"""Add nwsc_dummy_service top-level module to sys.path""" +"""Add nwsc_proxy top-level module to sys.path""" import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..')) diff --git a/python/nwsc_dummy_service/test/test_ncd_web_service.py b/python/nwsc_proxy/test/test_ncp_web_service.py similarity index 91% rename from python/nwsc_dummy_service/test/test_ncd_web_service.py rename to python/nwsc_proxy/test/test_ncp_web_service.py index 0542425..1f7bbf7 100644 --- 
a/python/nwsc_dummy_service/test/test_ncd_web_service.py +++ b/python/nwsc_proxy/test/test_ncp_web_service.py @@ -1,4 +1,4 @@ -"""Unit tests for ncd_web_service.py""" +"""Unit tests for ncp_web_service.py""" # ---------------------------------------------------------------------------------- # Created on Wed Dec 18 2024 # @@ -17,8 +17,8 @@ from pytest import fixture, MonkeyPatch from werkzeug.datastructures import MultiDict -from python.nwsc_dummy_service.ncd_web_service import (AppWrapper, Flask, Namespace, ProfileStore, - create_app, datetime, GSL_KEY) +from python.nwsc_proxy.ncp_web_service import (AppWrapper, Flask, Namespace, ProfileStore, + create_app, datetime, GSL_KEY) # constants EXAMPLE_DATETIME = datetime(2024, 1, 1, 12, 34) @@ -30,7 +30,7 @@ def mock_datetime(monkeypatch: MonkeyPatch) -> Mock: mock_obj = Mock(name='MockDatetime') mock_obj.now.return_value = EXAMPLE_DATETIME - monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.datetime', mock_obj) + monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.datetime', mock_obj) return mock_obj @@ -38,7 +38,7 @@ def mock_datetime(monkeypatch: MonkeyPatch) -> Mock: @fixture def mock_profile_store(monkeypatch: MonkeyPatch) -> Mock: mock_obj = Mock(name='MockProfileStore', spec=ProfileStore) - monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.ProfileStore', mock_obj) + monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.ProfileStore', mock_obj) return mock_obj @@ -49,7 +49,7 @@ def mock_func(*args, **_kwargs): mock_obj = Mock(name='MockJsonify') mock_obj.side_effect = mock_func - monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.jsonify', mock_obj) + monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.jsonify', mock_obj) return mock_obj @@ -59,7 +59,7 @@ def mock_current_app(monkeypatch: MonkeyPatch) -> Mock: mock_obj.logger.info.return_value = None mock_obj.logger.error.return_value = None mock_obj.config = MultiDict({'GSL_KEY': GSL_KEY}) - 
monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.current_app', mock_obj) + monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.current_app', mock_obj) return mock_obj @@ -69,7 +69,7 @@ def mock_request(monkeypatch: MonkeyPatch, mock_current_app, mock_jsonify) -> Mo mock_obj.origin = 'http://example.com:5000' mock_obj.method = 'GET' mock_obj.headers = MultiDict({'X-Api-Key': GSL_KEY}) - monkeypatch.setattr('python.nwsc_dummy_service.ncd_web_service.request', mock_obj) + monkeypatch.setattr('python.nwsc_proxy.ncp_web_service.request', mock_obj) return mock_obj diff --git a/python/nwsc_dummy_service/test/test_profile_store.py b/python/nwsc_proxy/test/test_profile_store.py similarity index 98% rename from python/nwsc_dummy_service/test/test_profile_store.py rename to python/nwsc_proxy/test/test_profile_store.py index f0c3a0f..6cdce91 100644 --- a/python/nwsc_dummy_service/test/test_profile_store.py +++ b/python/nwsc_proxy/test/test_profile_store.py @@ -17,7 +17,7 @@ from pytest import fixture -from python.nwsc_dummy_service.src.profile_store import ProfileStore, NEW_SUBDIR, EXISTING_SUBDIR +from python.nwsc_proxy.src.profile_store import ProfileStore, NEW_SUBDIR, EXISTING_SUBDIR # constants STORE_BASE_DIR = os.path.join(os.path.dirname(__file__), 'temp') From 16b9fa41bcf4b3db2226642733c0afb777d763db Mon Sep 17 00:00:00 2001 From: Mackenzie Grimes - NOAA Affiliate Date: Thu, 19 Dec 2024 14:55:48 -0700 Subject: [PATCH 24/24] rename a couple more "ims" references --- .github/workflows/build-package-mock-ims.yml | 2 +- docker/{dummyservice => nwsc_proxy}/dev/Dockerfile | 0 docker/{dummyservice => nwsc_proxy}/local/Dockerfile | 0 python/nwsc_proxy/README.md | 2 +- 4 files changed, 2 insertions(+), 2 deletions(-) rename docker/{dummyservice => nwsc_proxy}/dev/Dockerfile (100%) rename docker/{dummyservice => nwsc_proxy}/local/Dockerfile (100%) diff --git a/.github/workflows/build-package-mock-ims.yml b/.github/workflows/build-package-mock-ims.yml index 
9f03e15..5e1bcfd 100644 --- a/.github/workflows/build-package-mock-ims.yml +++ b/.github/workflows/build-package-mock-ims.yml @@ -1,4 +1,4 @@ -name: Build and Package the Mock IMS Service to Dev Registry +name: Build and Package the NWSC Proxy Service to Dev Registry on: push: branches: [ main ] diff --git a/docker/dummyservice/dev/Dockerfile b/docker/nwsc_proxy/dev/Dockerfile similarity index 100% rename from docker/dummyservice/dev/Dockerfile rename to docker/nwsc_proxy/dev/Dockerfile diff --git a/docker/dummyservice/local/Dockerfile b/docker/nwsc_proxy/local/Dockerfile similarity index 100% rename from docker/dummyservice/local/Dockerfile rename to docker/nwsc_proxy/local/Dockerfile diff --git a/python/nwsc_proxy/README.md b/python/nwsc_proxy/README.md index 6f6d618..297c256 100644 --- a/python/nwsc_proxy/README.md +++ b/python/nwsc_proxy/README.md @@ -17,7 +17,7 @@ The subsections below outline how to build the images within this project. All m - targeted environment: `:aws` --- -### IMS Service +### NWSC Proxy Service From the IDSS Engine project root directory `idss-engine/build///`: `$ docker-compose build proxy_service`