From 16d4f3236798c28aaf8802c87999a0402d1f8926 Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:27:50 -0800 Subject: [PATCH 01/42] DESENG-493: CRON config alignment and engagement metadata dependency removal (#2375) * Updating cron config and sample env --- CHANGELOG.MD | 6 + met-api/sample.env | 2 +- met-api/src/met_api/config.py | 2 +- .../met_api/services/engagement_service.py | 2 +- met-api/src/met_api/utils/notification.py | 5 +- met-api/src/met_api/utils/user_context.py | 3 +- met-cron/config.py | 386 ++++++++++-------- met-cron/sample.env | 106 ++++- .../services/closing_soon_mail_service.py | 5 +- .../services/comment_redact_service.py | 5 +- .../src/met_cron/services/mail_service.py | 14 +- .../met_cron/services/publish_mail_service.py | 5 +- .../met_cron/utils/subscription_checker.py | 9 +- 13 files changed, 343 insertions(+), 207 deletions(-) diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 45e0defb1..30f93e860 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,9 @@ +## February 06, 2024 +- **Task** Streamline CRON jobs [DESENG-493](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-493) + - Aligned the CRON configuration and sample environment files with the structure used in the Met API. + - Eliminated the reliance on engagement metadata within CRON jobs. + - Implemented necessary code adjustments to seamlessly integrate with the updated CRON configuration. + ## February 05, 2024 - **Task** Change "Superuser" to "Administrator" [DESENG-476](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-476) diff --git a/met-api/sample.env b/met-api/sample.env index 6b659668f..2fbe53638 100644 --- a/met-api/sample.env +++ b/met-api/sample.env @@ -89,7 +89,7 @@ ACCESS_REQUEST_EMAIL_SUBJECT= ACCESS_REQUEST_EMAIL_ADDRESS="accessRequestHandler.fakeName@gov.bc.ca" # Site paths for creating emails from templates -SITE_URL=localhost:3000 +SITE_URL=http://localhost:3000 SURVEY_PATH=/surveys/submit/{survey_id}/{token} USER_MANAGEMENT_PATH=/usermanagement SUBMISSION_PATH=/engagements/{engagement_id}/edit/{token} diff --git a/met-api/src/met_api/config.py b/met-api/src/met_api/config.py index 2b30e4e06..e47321e4a 100644 --- a/met-api/src/met_api/config.py +++ b/met-api/src/met_api/config.py @@ -289,7 +289,7 @@ def SQLALCHEMY_DATABASE_URI(self) -> str: 'CLOSEOUT': { 'ID': os.getenv('CLOSEOUT_EMAIL_TEMPLATE_ID'), 'SUBJECT': os.getenv('CLOSEOUT_EMAIL_SUBJECT', - 'MET - Engagement Closed'), + 'The public commenting period for {engagement_name} is now closed.'), }, 'ACCESS_REQUEST': { 'ID': os.getenv('ACCESS_REQUEST_EMAIL_TEMPLATE_ID'), diff --git a/met-api/src/met_api/services/engagement_service.py b/met-api/src/met_api/services/engagement_service.py index 0895c15b3..0432f1028 100644 --- a/met-api/src/met_api/services/engagement_service.py +++ b/met-api/src/met_api/services/engagement_service.py @@ -295,7 +295,7 @@ def _render_email_template(engagement: EngagementModel): engagement_url = notification.get_tenant_site_url(engagement.tenant_id, dashboard_path) templates = current_app.config['EMAIL_TEMPLATES'] subject = templates['CLOSEOUT']['SUBJECT'].format(engagement_name=engagement.name) - email_environment = templates['ENVIROMENT'] + email_environment = templates['ENVIRONMENT'] tenant_name = EngagementService._get_tenant_name( engagement.tenant_id) args = { diff --git a/met-api/src/met_api/utils/notification.py b/met-api/src/met_api/utils/notification.py index 472ced067..27a2e96e4 100644 --- a/met-api/src/met_api/utils/notification.py 
+++ b/met-api/src/met_api/utils/notification.py @@ -12,12 +12,13 @@ def get_tenant_site_url(tenant_id, path=''): """Get the tenant specific site url (domain / tenant / path).""" is_single_tenant_environment = current_app.config.get('IS_SINGLE_TENANT_ENVIRONMENT', False) - site_url = current_app.config.get('SITE_URL', '') + paths = current_app.config['PATH_CONFIG'] + site_url = paths.get('SITE', '') if not is_single_tenant_environment: if tenant_id is None: raise ValueError('Missing tenant id.') tenant: Tenant = Tenant.find_by_id(tenant_id) - return site_url + f'/{tenant.short_name}' + path + return site_url + f'/{tenant.short_name.lower()}' + path else: return site_url + path diff --git a/met-api/src/met_api/utils/user_context.py b/met-api/src/met_api/utils/user_context.py index facafc426..c6be6f5b5 100644 --- a/met-api/src/met_api/utils/user_context.py +++ b/met-api/src/met_api/utils/user_context.py @@ -39,7 +39,8 @@ def __init__(self): self._last_name: str = token_info.get('lastname', None) self._tenant_id: str = token_info.get(TENANT_ID_JWT_CLAIM, None) self._bearer_token: str = _get_token() - self._roles: list = current_app.config['JWT_ROLE_CALLBACK'](token_info) + self._roles: list = current_app.config['JWT_ROLE_CALLBACK'](token_info) if 'client_roles' in token_info \ + else [] self._sub: str = token_info.get('sub', None) self._name: str = f"{token_info.get('firstname', None)} {token_info.get('lastname', None)}" diff --git a/met-cron/config.py b/met-cron/config.py index 070319461..fbbb5edb1 100644 --- a/met-cron/config.py +++ b/met-cron/config.py @@ -20,24 +20,21 @@ """ import os -import sys +from typing import Union from dotenv import find_dotenv, load_dotenv -# this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) +from met_api.utils.constants import TestKeyConfig +from met_api.utils.util import is_truthy -CONFIGURATION = { - 'development': 'config.DevConfig', - 'testing': 'config.TestConfig', - 'production': 'config.ProdConfig', - 'default': 'config.ProdConfig', - # Alembic connects to migration config which is MET Analytics Database - 'migration': 'config.MigrationConfig', -} +# Search in increasingly higher folders for a .env file, then load it, +# appending any variables we find to the current environment. +load_dotenv(find_dotenv()) +# remove all env variables with no text (allows for entries to be unset easily) +os.environ = {k: v for k, v in os.environ.items() if v} -def get_named_config(environment: 'str | None') -> '_Config': +def get_named_config(environment: Union[str, None]) -> '_Config': """ Retrieve a configuration object by name. Used by the Flask app factory. @@ -56,111 +53,175 @@ def get_named_config(environment: 'str | None') -> '_Config': } try: print(f'Loading configuration: {environment}...') - return config_mapping[environment]() - except KeyError: - raise KeyError(f'Configuration "{environment}" not found.') + return config_mapping.get(environment or 'production', ProdConfig)() + except KeyError as e: + raise KeyError(f'Configuration "{environment}" not found.') from e + + +def env_truthy(env_var, default: Union[bool, str] = False): + """ + Return True if the environment variable is set to a truthy value. + + Accepts a default value, which is returned if the environment variable is + not set. 
+    """
+    return is_truthy(os.getenv(env_var, str(default)))
 
 
 class _Config():  # pylint: disable=too-few-public-methods
     """Base class configuration that should set reasonable defaults for all the other configurations."""
 
+    def __init__(self) -> None:
+        """
+        Initialize the configuration object.
+
+        Performs more advanced configuration logic that is not possible
+        in the normal class definition.
+        """
+        # If extending this class, call super().__init__() in your constructor.
+        print(f'SQLAlchemy URL: {self.SQLALCHEMY_DATABASE_URI}')
+
+        # apply configs to _Config in the format that flask_jwt_oidc expects
+        # this flattens the JWT_CONFIG dict into individual attributes
+        for key, value in self.JWT_CONFIG.items():
+            setattr(self, f'JWT_OIDC_{key}', value)
+
+        # Enable live reload and interactive API debugger for developers
+        os.environ['FLASK_DEBUG'] = str(self.USE_DEBUG)
+
+    @property
+    # pylint: disable=invalid-name
+    def SQLALCHEMY_DATABASE_URI(self) -> str:
+        """
+        Dynamically fetch the SQLAlchemy Database URI based on the DB config.
+
+        This avoids having to redefine the URI after setting the DB access
+        credentials in subclasses. Can be overridden by env variables.
+        """
+        return os.environ.get(
+            'SQLALCHEMY_DATABASE_URI',
+            f'postgresql://'
+            f'{self.DB_CONFIG.get("USER")}:{self.DB_CONFIG.get("PASSWORD")}@'
+            f'{self.DB_CONFIG.get("HOST")}:{self.DB_CONFIG.get("PORT")}/'
+            f'{self.DB_CONFIG.get("NAME")}'
+        )
+
     PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
 
-    SECRET_KEY = 'a secret'
+    # Used for session management. Randomized by default for security, but
+    # should be set to a fixed value in production to avoid invalidating sessions.
+    SECRET_KEY = os.getenv('SECRET_KEY', os.urandom(24))
+
+    # If enabled, Exceptions are propagated up, instead of being handled
+    # by the app’s error handlers. Enable this for tests.
+    TESTING = env_truthy('FLASK_TESTING', default=False)
+
+    # If enabled, the interactive debugger will be shown for any
+    # unhandled Exceptions, and the server will be reloaded when code changes.
+ USE_DEBUG = env_truthy('FLASK_DEBUG', default=False) + + # PostgreSQL configuration + DB_CONFIG = DB = { + 'USER': os.getenv('DATABASE_USERNAME', ''), + 'PASSWORD': os.getenv('DATABASE_PASSWORD', ''), + 'NAME': os.getenv('DATABASE_NAME', ''), + 'HOST': os.getenv('DATABASE_HOST', ''), + 'PORT': os.getenv('DATABASE_PORT', '5432'), + } - TESTING = False - DEBUG = False + # SQLAlchemy settings + # Echoes the SQL queries generated - useful for debugging + SQLALCHEMY_ECHO = env_truthy('SQLALCHEMY_ECHO') + # Disable modification tracking for performance + SQLALCHEMY_TRACK_MODIFICATIONS = env_truthy('SQLALCHEMY_TRACK_MODIFICATIONS') + + # Keycloak configuration + KEYCLOAK_CONFIG = KC = { + 'BASE_URL': os.getenv('KEYCLOAK_BASE_URL', ''), + 'REALMNAME': os.getenv('KEYCLOAK_REALMNAME', 'standard'), + 'SERVICE_ACCOUNT_ID': os.getenv('MET_ADMIN_CLIENT_ID'), + 'SERVICE_ACCOUNT_SECRET': os.getenv('MET_ADMIN_CLIENT_SECRET'), + 'ADMIN_USERNAME': os.getenv('MET_ADMIN_CLIENT_ID'), + 'ADMIN_SECRET': os.getenv('MET_ADMIN_CLIENT_SECRET'), + 'CONNECT_TIMEOUT': int(os.getenv('KEYCLOAK_CONNECT_TIMEOUT', '60')), + } - # POSTGRESQL CONFIGURATION FOR MET ANALYTICS DATABASE .Used in the Bind and Migration config - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') + # JWT OIDC Settings (for Keycloak) + JWT_CONFIG = JWT = { + 'ISSUER': ( + _issuer := os.getenv( + 'JWT_OIDC_ISSUER', + f'{KC["BASE_URL"]}/realms/{KC["REALMNAME"]}' + )), + 'WELL_KNOWN_CONFIG': os.getenv( + 'JWT_OIDC_WELL_KNOWN_CONFIG', + f'{_issuer}/.well-known/openid-configuration', + ), + 'JWKS_URI': os.getenv('JWT_OIDC_JWKS_URI', f'{_issuer}/protocol/openid-connect/certs'), + 'ALGORITHMS': os.getenv('JWT_OIDC_ALGORITHMS', 'RS256'), + 'AUDIENCE': os.getenv('JWT_OIDC_AUDIENCE', 'account'), + 'CACHING_ENABLED': str(env_truthy('JWT_OIDC_CACHING_ENABLED', True)), + 'JWKS_CACHE_TIMEOUT': int(os.getenv('JWT_OIDC_JWKS_CACHE_TIMEOUT', '300')), + 'ROLE_CLAIM': os.getenv('JWT_OIDC_ROLE_CLAIM', 'client_roles'), + } - # POSTGRESQL CONFIGURATION FOR MET MASTER DATABASE - MET_DB_USER = os.getenv('MET_DATABASE_USERNAME', '') - MET_DB_PASSWORD = os.getenv('MET_DATABASE_PASSWORD', '') - MET_DB_HOST = os.getenv('MET_DATABASE_HOST', '') - MET_DB_PORT = os.getenv('MET_DATABASE_PORT', '5432') - MET_DB_NAME = os.getenv('MET_DB_NAME', '') + # The following are the paths used in the email templates. They do not + # determine the actual paths used in the application. They are used to + # construct the links in the emails sent to users. + PATH_CONFIG = PATHS = { + 'SITE': os.getenv('SITE_URL'), + 'SUBSCRIBE': os.getenv( + 'SUBSCRIBE_PATH', '/engagements/{engagement_id}/subscribe/{token}' + ), + 'UNSUBSCRIBE': os.getenv( + 'UNSUBSCRIBE_PATH', '/engagements/{engagement_id}/unsubscribe/{participant_id}' + ), + 'ENGAGEMENT': { + 'VIEW': os.getenv('ENGAGEMENT_PATH', '/engagements/{engagement_id}/view'), + 'SLUG': os.getenv('ENGAGEMENT_PATH_SLUG', '/{slug}'), + 'DASHBOARD': os.getenv( + 'ENGAGEMENT_DASHBOARD_PATH', '/engagements/{engagement_id}/comments/public' + ), + 'DASHBOARD_SLUG': os.getenv( + 'ENGAGEMENT_DASHBOARD_PATH_SLUG', '/{slug}/comments/public' + ), + } + } - """ - Though the main data base assosiated with this microservice is MET Analytics database , its configured as a bind. 
- MET DB is configured as the main database since the models are used from the met-api python module and we cant change it to add bind. - So all the models in this project will have a bind. + # Templates for sending users various notifications by email. + EMAIL_TEMPLATES = { + # The time of day when engagements get closed. This should match the + # value in met-cron/cron/crontab + 'CLOSING_TIME': os.getenv('ENGAGEMENT_END_TIME', '5 PM'), + 'FROM_ADDRESS': os.getenv('EMAIL_FROM_ADDRESS'), + 'ENVIRONMENT': os.getenv('EMAIL_ENVIRONMENT'), + 'CLOSEOUT': { + 'ID': os.getenv('CLOSEOUT_EMAIL_TEMPLATE_ID'), + 'SUBJECT': os.getenv('CLOSEOUT_EMAIL_SUBJECT', + 'The public commenting period for {engagement_name} is now closed.'), + }, + 'CLOSING_SOON': { + 'ID': os.getenv('CLOSING_SOON_EMAIL_TEMPLATE_ID'), + 'SUBJECT': os.getenv('CLOSING_SOON_EMAIL_SUBJECT', + 'Public comment period closes in 2 days'), + }, + 'PUBLISH': { + 'ID': os.getenv('PUBLISH_EMAIL_TEMPLATE_ID'), + 'SUBJECT': os.getenv('PUBLISH_EMAIL_SUBJECT', 'Share your feedback'), + } + } - To handle migrations ,we need to connect to MET Analytics database.For that a new Miigration config is added. - """ + # The secret key used for encryption when sending emails to participants. + EMAIL_SECRET_KEY = os.getenv('EMAIL_SECRET_KEY', os.urandom(24)) - # MET MASTER DB. - SQLALCHEMY_DATABASE_URI = f'postgresql://{MET_DB_USER}:{MET_DB_PASSWORD}@{MET_DB_HOST}:{int(MET_DB_PORT)}/{MET_DB_NAME}' + # Single tenant environment mode - disables certain checks for user + # permissions and tenant access. When enabled, all users are assumed to + # have access to all tenants. Will probably cause bugs if enabled. + IS_SINGLE_TENANT_ENVIRONMENT = env_truthy('IS_SINGLE_TENANT_ENVIRONMENT') - # MET ANALYTICS DB. - SQLALCHEMY_BINDS = { - 'met_db_analytics': f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' - } - SQLALCHEMY_ECHO = False - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # JWT_OIDC Settings - JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG') - JWT_OIDC_ALGORITHMS = os.getenv('JWT_OIDC_ALGORITHMS', 'RS256') - JWT_OIDC_JWKS_URI = os.getenv('JWT_OIDC_JWKS_URI') - JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_ISSUER') - JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_AUDIENCE', 'account') - JWT_OIDC_CACHING_ENABLED = os.getenv('JWT_OIDC_CACHING_ENABLED', 'True') - JWT_OIDC_JWKS_CACHE_TIMEOUT = 300 - - S3_BUCKET = os.getenv('S3_BUCKET') - S3_ACCESS_KEY_ID = os.getenv('S3_ACCESS_KEY_ID') - S3_SECRET_ACCESS_KEY = os.getenv('S3_SECRET_ACCESS_KEY') - S3_HOST = os.getenv('S3_HOST') - S3_REGION = os.getenv('S3_REGION') - S3_SERVICE = os.getenv('S3_SERVICE') - - TIME_DELTA_IN_MINUTES = os.getenv('TIME_DELTA_IN_MINUTES', 30) - - print(f'SQLAlchemy URL (_Config): {SQLALCHEMY_DATABASE_URI}') - - # Service account details - KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv('MET_ADMIN_CLIENT_ID') - KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv('MET_ADMIN_CLIENT_SECRET') - - # front end endpoints - SITE_URL = os.getenv('SITE_URL') - # needed for close out emails for met api - ENGAGEMENT_DASHBOARD_PATH = os.getenv('ENGAGEMENT_DASHBOARD_PATH', '/engagements/{engagement_id}/comments/public') - ENGAGEMENT_DASHBOARD_PATH_SLUG = os.getenv('ENGAGEMENT_DASHBOARD_PATH_SLUG', '/{slug}/comments/public') - # needed for publish emails for met api - ENGAGEMENT_VIEW_PATH = os.getenv('ENGAGEMENT_VIEW_PATH', '/engagements/{engagement_id}/view') - ENGAGEMENT_VIEW_PATH_SLUG = os.getenv('ENGAGEMENT_VIEW_PATH_SLUG', '/{slug}') - UNSUBSCRIBE_PATH = os.getenv('UNSUBSCRIBE_PATH', 
'/engagements/{engagement_id}/unsubscribe/{participant_id}') - - # The GC notify email variables - # Publish Email Service - EMAIL_SECRET_KEY = os.getenv('EMAIL_SECRET_KEY', 'secret') - PUBLISH_ENGAGEMENT_EMAIL_TEMPLATE_ID = os.getenv('PUBLISH_ENGAGEMENT_EMAIL_TEMPLATE_ID') - PUBLISH_ENGAGEMENT_EMAIL_SUBJECT = os.getenv('PUBLISH_ENGAGEMENT_EMAIL_SUBJECT', 'Share your feedback') - - # EAO is a single Tenant Environment where EAO is the only env and should be set to True - # This flag decides if additonal tenant based checks has to be carried or not - IS_SINGLE_TENANT_ENVIRONMENT = os.getenv('IS_SINGLE_TENANT_ENVIRONMENT', 'False').lower() == 'true' - - # Closing Soon Email Service - ENGAGEMENT_CLOSING_SOON_EMAIL_TEMPLATE_ID = os.getenv('ENGAGEMENT_CLOSING_SOON_EMAIL_TEMPLATE_ID') - ENGAGEMENT_CLOSING_SOON_EMAIL_SUBJECT = os.getenv('ENGAGEMENT_CLOSING_SOON_EMAIL_SUBJECT', - 'Public comment period closes in 2 days') - - # Email Service - ENGAGEMENT_CLOSEOUT_EMAIL_TEMPLATE_ID = os.getenv('ENGAGEMENT_CLOSEOUT_EMAIL_TEMPLATE_ID') - ENGAGEMENT_CLOSEOUT_EMAIL_SUBJECT = \ - os.getenv('ENGAGEMENT_CLOSEOUT_EMAIL_SUBJECT', '{engagement_name} - What we heard') + # The API endpoint used to send emails to participants. NOTIFICATIONS_EMAIL_ENDPOINT = os.getenv('NOTIFICATIONS_EMAIL_ENDPOINT') - # Environment from which email is sent - EMAIL_ENVIRONMENT = os.getenv('EMAIL_ENVIRONMENT', '') - # config for comment_redact_service N_DAYS = os.getenv('N_DAYS', 14) REDACTION_TEXT = os.getenv('REDACTION_TEXT', '[Comment Redacted]') @@ -174,54 +235,63 @@ class _Config(): # pylint: disable=too-few-public-methods class MigrationConfig(): # pylint: disable=too-few-public-methods """Base class configuration that should set reasonable defaults for all the other configurations.""" - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - SECRET_KEY = 'a secret' - - TESTING = False - DEBUG = False - - # Migration connects to the MET Analytics database - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' - SQLALCHEMY_ECHO = True - SQLALCHEMY_TRACK_MODIFICATIONS = True - - print(f'SQLAlchemy URL (_Config): {SQLALCHEMY_DATABASE_URI}') + # SQLAlchemy settings + # Echoes the SQL queries generated - useful for debugging + SQLALCHEMY_ECHO = env_truthy('SQLALCHEMY_ECHO', True) + # Disable modification tracking for performance + SQLALCHEMY_TRACK_MODIFICATIONS = env_truthy('SQLALCHEMY_TRACK_MODIFICATIONS', True) class DevConfig(_Config): # pylint: disable=too-few-public-methods """Dev Config.""" - TESTING = False - DEBUG = True - print(f'SQLAlchemy URL (DevConfig): {_Config.SQLALCHEMY_DATABASE_URI}') + # Default to using the debugger for development + USE_DEBUG = env_truthy('USE_DEBUG', True) class TestConfig(_Config): # pylint: disable=too-few-public-methods """In support of testing only.used by the py.test suite.""" - DEBUG = True - TESTING = True - DEBUG = True - TESTING = True - # POSTGRESQL - DB_USER = os.getenv('DATABASE_TEST_USERNAME', 'postgres') - DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', 'postgres') - DB_NAME = os.getenv('DATABASE_TEST_NAME', 'postgres') - DB_HOST = os.getenv('DATABASE_TEST_HOST', 'localhost') - DB_PORT = os.getenv('DATABASE_TEST_PORT', '54334') - MET_DB_PORT = os.getenv('DATABASE_TEST_PORT', '54333') - 
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_TEST_URL', - f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}') + def __init__(self) -> None: + """ + Initialize the object. + + This method is called when an object is created. It sets up the initial + state of the object. + + """ + super().__init__() + + # Override Keycloak variables here + self.KC['ADMIN_USERNAME'] = os.getenv( + 'KEYCLOAK_TEST_ADMIN_CLIENTID', + self.KC['ADMIN_USERNAME'] + ) + self.KC['ADMIN_SECRET'] = os.getenv( + 'KEYCLOAK_TEST_ADMIN_SECRET', + self.KC['ADMIN_SECRET'] + ) + self.KC['BASE_URL'] = os.getenv('KEYCLOAK_TEST_BASE_URL', self.KC['BASE_URL']) + self.KC['REALMNAME'] = os.getenv('KEYCLOAK_TEST_REALMNAME', self.KC['REALMNAME']) + + # Propagate exceptions up to the test runner + TESTING = env_truthy('FLASK_TESTING', default=True) + # explicitly disable the debugger; we want the tests to fail if an + # unhandled exception occurs + USE_DEBUG = False + + # Override the DB config to use the test database, if one is configured + DB_CONFIG = { + 'USER': os.getenv('DATABASE_TEST_USERNAME', _Config.DB.get('USER')), + 'PASSWORD': os.getenv('DATABASE_TEST_PASSWORD', _Config.DB.get('PASSWORD')), + 'NAME': os.getenv('DATABASE_TEST_NAME', _Config.DB.get('NAME')), + 'HOST': os.getenv('DATABASE_TEST_HOST', _Config.DB.get('HOST')), + 'PORT': os.getenv('DATABASE_TEST_PORT', _Config.DB.get('PORT')), + } # JWT OIDC settings - # JWT_OIDC_TEST_MODE will set jwt_manager to use - JWT_OIDC_TEST_MODE = True + # JWT OIDC Settings for the test environment + JWT_OIDC_TEST_MODE = True # enables the test mode for flask_jwt_oidc JWT_OIDC_TEST_AUDIENCE = os.getenv('JWT_OIDC_TEST_AUDIENCE') JWT_OIDC_TEST_CLIENT_SECRET = os.getenv('JWT_OIDC_TEST_CLIENT_SECRET') JWT_OIDC_TEST_ISSUER = os.getenv('JWT_OIDC_TEST_ISSUER') @@ -279,26 +349,10 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods 4H8UZcVFN95vEKxJiLRjAmj6g273pu9kK4ymXNEjWWJn -----END RSA PRIVATE KEY-----""" - KEYCLOAK_ADMIN_USERNAME = KEYCLOAK_BCROS_ADMIN_CLIENTID = os.getenv('KEYCLOAK_TEST_ADMIN_CLIENTID') - KEYCLOAK_ADMIN_SECRET = KEYCLOAK_BCROS_ADMIN_SECRET = os.getenv('KEYCLOAK_TEST_ADMIN_SECRET') - KEYCLOAK_BASE_URL = KEYCLOAK_BCROS_BASE_URL = os.getenv('KEYCLOAK_TEST_BASE_URL') - KEYCLOAK_REALMNAME = KEYCLOAK_BCROS_REALMNAME = os.getenv('KEYCLOAK_TEST_REALMNAME') JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_TEST_AUDIENCE') JWT_OIDC_CLIENT_SECRET = os.getenv('JWT_OIDC_TEST_CLIENT_SECRET') JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_TEST_ISSUER') - # Service account details - KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv('KEYCLOAK_TEST_ADMIN_CLIENTID') - KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv('KEYCLOAK_TEST_ADMIN_SECRET') - - # Legal-API URL - LEGAL_API_URL = 'https://mock-auth-tools.pathfinder.gov.bc.ca/rest/legal-api/2.7/api/v1' - - NOTIFY_API_URL = 'http://localhost:8080/notify-api/api/v1' - BCOL_API_URL = 'http://localhost:8080/bcol-api/api/v1' - PAY_API_URL = 'http://localhost:8080/pay-api/api/v1' - PAY_API_SANDBOX_URL = 'http://localhost:8080/pay-api/api/v1' - # If any value is present in this flag, starts up a keycloak docker USE_TEST_KEYCLOAK_DOCKER = os.getenv('USE_TEST_KEYCLOAK_DOCKER', None) USE_DOCKER_MOCK = os.getenv('USE_DOCKER_MOCK', None) @@ -307,25 +361,15 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods class DockerConfig(_Config): # pylint: disable=too-few-public-methods """In support of testing only.used by the py.test suite.""" - # POSTGRESQL - DB_USER = os.getenv('DATABASE_DOCKER_USERNAME') - DB_PASSWORD = 
os.getenv('DATABASE_DOCKER_PASSWORD') - DB_NAME = os.getenv('DATABASE_DOCKER_NAME') - DB_HOST = os.getenv('DATABASE_DOCKER_HOST') - DB_PORT = os.getenv('DATABASE_DOCKER_PORT', '5432') - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' - - print(f'SQLAlchemy URL (Docker): {SQLALCHEMY_DATABASE_URI}') + # Override DB config to use the docker database, if one is configured + DB_CONFIG = { + 'USER': os.getenv('DATABASE_DOCKER_USERNAME', _Config.DB.get('USER')), + 'PASSWORD': os.getenv('DATABASE_DOCKER_PASSWORD', _Config.DB.get('PASSWORD')), + 'NAME': os.getenv('DATABASE_DOCKER_NAME', _Config.DB.get('NAME')), + 'HOST': os.getenv('DATABASE_DOCKER_HOST', _Config.DB.get('HOST')), + 'PORT': os.getenv('DATABASE_DOCKER_PORT', _Config.DB.get('PORT')), + } class ProdConfig(_Config): # pylint: disable=too-few-public-methods """Production Config.""" - - SECRET_KEY = os.getenv('SECRET_KEY', None) - - if not SECRET_KEY: - SECRET_KEY = os.urandom(24) - print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) - - TESTING = False - DEBUG = False diff --git a/met-cron/sample.env b/met-cron/sample.env index f3f59d9d1..e245702b5 100644 --- a/met-cron/sample.env +++ b/met-cron/sample.env @@ -1,20 +1,96 @@ +# GDX MET CRON Configuration +# For more information on these values, please see the documentation +# or met-cron/config.py -DATABASE_USERNAME=analytics -DATABASE_PASSWORD=analytics -DATABASE_NAME=met -DATABASE_HOST=localhost -DATABASE_PORT=5432 +# Changes Flask's run mode and the set of env vars are used to configure the app. You should not need to change this here. +FLASK_ENV=development -MET_DATABASE_USERNAME=met -MET_DATABASE_PASSWORD=met -MET_DB_NAME=met -MET_DATABASE_HOST=localhost -MET_DATABASE_PORT=5432 +USE_DEBUG=True # Enable a dev-friendly debug mode +TESTING= # Handle errors normally (False) or raise exceptions (True) +# Miscellaneous Settings +SECRET_KEY="" # For Flask sessions. If unset, this value is randomized +IS_SINGLE_TENANT_ENVIRONMENT=false +USE_TEST_KEYCLOAK_DOCKER=false +USE_DOCKER_MOCK=false +ENGAGEMENT_END_TIME="5 PM" -MET_ADMIN_CLIENT_ID=met-admin -MET_ADMIN_CLIENT_SECRET= -JWT_OIDC_ISSUER=https://localhost:8080/auth/realms/met +# Keycloak configuration. +# Populate from 'GDX Modern Engagement Tools-installation-*.json' +# https://bcgov.github.io/sso-requests +KEYCLOAK_BASE_URL="" # auth-server-url +KEYCLOAK_REALMNAME="" # realm +MET_ADMIN_CLIENT_ID="" # resource +MET_ADMIN_CLIENT_SECRET="" # credentials.secret +KEYCLOAK_CONNECT_TIMEOUT="60" + +# JWT OIDC configuration for authentication +# Populate from 'GDX MET web (public)-installation-*.json' +JWT_OIDC_AUDIENCE="" # resource +JWT_OIDC_ISSUER="" # default: constructed from base url and realm name +JWT_OIDC_WELL_KNOWN_CONFIG="" # default: constructed from issuer +JWT_OIDC_JWKS_URI="" # default: constructed from issuer +# Object path to access roles from JWT token +JWT_OIDC_ROLE_CLAIM=client_roles # Keycloak schema +JWT_OIDC_CACHING_ENABLED=true # Enable caching of JWKS. +JWT_OIDC_JWKS_CACHE_TIMEOUT=300 # Timeout for JWKS cache in seconds. 
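The three JWT_OIDC_* URL values above can usually be left blank. As a rough, illustrative sketch (not part of this patch; the Keycloak host below is a made-up placeholder), this mirrors how the JWT_CONFIG fallbacks in met-cron/config.py, shown earlier in this patch, derive the defaults from the Keycloak settings:

    import os

    # Placeholder values standing in for KEYCLOAK_BASE_URL / KEYCLOAK_REALMNAME
    base_url = os.getenv('KEYCLOAK_BASE_URL', 'https://keycloak.example.com/auth')
    realm = os.getenv('KEYCLOAK_REALMNAME', 'standard')

    # Same fallback chain as JWT_CONFIG in met-cron/config.py
    issuer = os.getenv('JWT_OIDC_ISSUER', f'{base_url}/realms/{realm}')
    well_known = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG', f'{issuer}/.well-known/openid-configuration')
    jwks_uri = os.getenv('JWT_OIDC_JWKS_URI', f'{issuer}/protocol/openid-connect/certs')

    # With none of the overrides set, the derived values are, for example:
    # issuer     -> https://keycloak.example.com/auth/realms/standard
    # well_known -> https://keycloak.example.com/auth/realms/standard/.well-known/openid-configuration
    # jwks_uri   -> https://keycloak.example.com/auth/realms/standard/protocol/openid-connect/certs
    print(issuer, well_known, jwks_uri)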
+ +# Database Configuration +DATABASE_HOST="localhost" +DATABASE_PORT="5432" +DATABASE_USERNAME="postgres" +DATABASE_PASSWORD="postgres" +DATABASE_NAME="met" +#Default: set from above settings (this overrides them) +SQLALCHEMY_DATABASE_URI= +SQLALCHEMY_ECHO= +SQLALCHEMY_TRACK_MODIFICATIONS= + +# Email API Configuration +NOTIFICATIONS_EMAIL_ENDPOINT=https://met-notify-api-dev.apps.gold.devops.gov.bc.ca/api/v1/notifications/email +EMAIL_SECRET_KEY="notASecureKey" # If unset, this value is randomized +EMAIL_ENVIRONMENT= +EMAIL_FROM_ADDRESS="met-example@gov.bc.ca" + +# Email Template Configuration +# Default values for subject lines are provided as a reasonable starting point. +# If you need to customize email subjects, kindly update the relevant values in the subject lines. +CLOSEOUT_EMAIL_TEMPLATE_ID= +CLOSEOUT_EMAIL_SUBJECT= +CLOSING_SOON_EMAIL_TEMPLATE_ID= +CLOSING_SOON_EMAIL_SUBJECT= +PUBLISH_EMAIL_TEMPLATE_ID= +PUBLISH_EMAIL_SUBJECT= + +# Site paths for creating emails from templates SITE_URL=http://localhost:3000 -NOTIFICATIONS_EMAIL_ENDPOINT=https://localhost:5002/api/v1/notifications/email -ENGAGEMENT_CLOSEOUT_EMAIL_TEMPLATE_ID=b7ea041b-fc30-4ad3-acb2-82119dd4f95d \ No newline at end of file +SUBSCRIBE_PATH=/engagements/{engagement_id}/subscribe/{token} +UNSUBSCRIBE_PATH=/engagements/{engagement_id}/unsubscribe/{participant_id} +ENGAGEMENT_PATH=/engagements/{engagement_id}/view +ENGAGEMENT_PATH_SLUG=/{slug} +ENGAGEMENT_DASHBOARD_PATH=/engagements/{engagement_id}/comments/public +ENGAGEMENT_DASHBOARD_PATH_SLUG=/{slug}/comments/public + +JWT_OIDC_TEST_AUDIENCE=met-web +JWT_OIDC_TEST_CLIENT_SECRET="1111111111" +JWT_OIDC_TEST_ISSUER=localhost.localdomain +JWT_OIDC_TEST_ALGORITHMS=RS256 + +# Test database settings +# If unset, uses the same settings as the main database +DATABASE_TEST_USERNAME= +DATABASE_TEST_PASSWORD= +DATABASE_TEST_NAME= +DATABASE_TEST_HOST= +DATABASE_TEST_PORT= + +# A keycloak server is started automatically by Pytest; there is no need to start your own instance. 
+KEYCLOAK_TEST_BASE_URL="http://localhost:8081/auth"
+
+# Docker database settings
+# If unset, uses the same settings as the main database
+DATABASE_DOCKER_USERNAME=
+DATABASE_DOCKER_PASSWORD=
+DATABASE_DOCKER_NAME=
+DATABASE_DOCKER_HOST=
+DATABASE_DOCKER_PORT=
\ No newline at end of file
diff --git a/met-cron/src/met_cron/services/closing_soon_mail_service.py b/met-cron/src/met_cron/services/closing_soon_mail_service.py
index db6cbc409..e798fece0 100644
--- a/met-cron/src/met_cron/services/closing_soon_mail_service.py
+++ b/met-cron/src/met_cron/services/closing_soon_mail_service.py
@@ -24,8 +24,9 @@ def do_mail():
         """
         offset_days: int = int(current_app.config.get('OFFSET_DAYS'))
         engagements_closing_soon = ClosingSoonEmailService.get_engagements_closing_soon(offset_days)
-        template_id = current_app.config.get('ENGAGEMENT_CLOSING_SOON_EMAIL_TEMPLATE_ID', None)
-        subject = current_app.config.get('ENGAGEMENT_CLOSING_SOON_EMAIL_SUBJECT')
+        templates = current_app.config['EMAIL_TEMPLATES']
+        template_id = templates['CLOSING_SOON']['ID']
+        subject = templates['CLOSING_SOON']['SUBJECT']
         template = Template.get_template('engagement_closing_soon.html')
         for engagement in engagements_closing_soon:
             # Process each mails.First set status as PROCESSING
diff --git a/met-cron/src/met_cron/services/comment_redact_service.py b/met-cron/src/met_cron/services/comment_redact_service.py
index 2b78da15e..816091282 100644
--- a/met-cron/src/met_cron/services/comment_redact_service.py
+++ b/met-cron/src/met_cron/services/comment_redact_service.py
@@ -25,9 +25,10 @@ def do_redact_comments():
         3. Redact comments in submission_json by submission_ids
         """
-        submissions = CommentRedactService._find_submissions_for_n_days_closed_engagements(days=current_app.config.get('N_DAYS', 14))
+        n_days: int = int(current_app.config.get('N_DAYS', 14))
+        submissions = CommentRedactService._find_submissions_for_n_days_closed_engagements(days=n_days)
         if not submissions:
-            current_app.logger.info(f'>>>>>No Submissions for Engagements closed for {current_app.config.get("N_DAYS", 14)} days found.')
+            current_app.logger.info(f'>>>>>No Submissions for Engagements closed for {n_days} days found.')
             return
         current_app.logger.info('>>>>>Total Submissions to redact found: %s.', len(submissions))
         submissions_ids = [submission.id for submission in submissions]
diff --git a/met-cron/src/met_cron/services/mail_service.py b/met-cron/src/met_cron/services/mail_service.py
index fda052eb0..1fe7d4dd6 100644
--- a/met-cron/src/met_cron/services/mail_service.py
+++ b/met-cron/src/met_cron/services/mail_service.py
@@ -6,7 +6,7 @@
 from met_api.exceptions.business_exception import BusinessException
 from met_api.models import Tenant as TenantModel
 from met_api.models.engagement import Engagement as EngagementModel
-from met_api.models.engagement_metadata import EngagementMetadataModel
+# from met_api.models.engagement_metadata import EngagementMetadataModel
 from met_api.models.participant import Participant as ParticipantModel
 from met_api.models.subscription import Subscription as SubscriptionModel
 from met_api.services.email_verification_service import EmailVerificationService
@@ -53,17 +53,19 @@ def _send_email_notification_for_subscription(engagement_id, template_id, subjec
     def _render_email_template(engagement, participant, template):
         site_url = notification.get_tenant_site_url(engagement.tenant_id)
         tenant_name = EmailService._get_tenant_name(engagement.tenant_id)
-        metadata_model: EngagementMetadataModel = EngagementMetadataModel.find_by_id(engagement.id)
-        project_name = None
-        if metadata_model and 'project_name' in metadata_model.project_metadata:
-            project_name = metadata_model.project_metadata.get('project_name')
+        # TODO should be re-visited once the engagement metadata functionality is completed
+        # metadata_model: EngagementMetadataModel = EngagementMetadataModel.find_by_id(engagement.id)
+        # project_name = None
+        # if metadata_model and 'project_name' in metadata_model.project_metadata:
+        #     project_name = metadata_model.project_metadata.get('project_name')
         paths = current_app.config['PATH_CONFIG']
         view_path = paths['ENGAGEMENT']['VIEW'].format(engagement_id=engagement.id)
         unsubscribe_url = paths['UNSUBSCRIBE'].format(
             engagement_id=engagement.id, participant_id=participant.id)
         email_environment = current_app.config['EMAIL_TEMPLATES']['ENVIRONMENT']
+        # TODO should be re-visited once the engagement metadata functionality is completed
         args = {
-            'project_name': project_name if project_name else engagement.name,
+            'project_name': engagement.name,
             'survey_url': f'{site_url}{view_path}',
             'end_date': datetime.strftime(engagement.end_date, EmailVerificationService.full_date_format),
             'tenant_name': tenant_name,
diff --git a/met-cron/src/met_cron/services/publish_mail_service.py b/met-cron/src/met_cron/services/publish_mail_service.py
index bfc141a97..891e42f73 100644
--- a/met-cron/src/met_cron/services/publish_mail_service.py
+++ b/met-cron/src/met_cron/services/publish_mail_service.py
@@ -21,8 +21,9 @@ def do_mail():
         email_batch_size: int = int(current_app.config.get('MAIL_BATCH_SIZE'))
         mails = EmailQueueModel.get_unprocessed_mails_for_open_engagements(email_batch_size)
         mail: EmailQueueModel
-        template_id = current_app.config.get('PUBLISH_ENGAGEMENT_EMAIL_TEMPLATE_ID', None)
-        subject = current_app.config.get('PUBLISH_ENGAGEMENT_EMAIL_SUBJECT')
+        templates = current_app.config['EMAIL_TEMPLATES']
+        template_id = templates['PUBLISH']['ID']
+        subject = templates['PUBLISH']['SUBJECT']
         template = Template.get_template('publish_engagement.html')
         for mail in mails:
             # Process each mails.First set status as PROCESSING
diff --git a/met-cron/src/met_cron/utils/subscription_checker.py b/met-cron/src/met_cron/utils/subscription_checker.py
index 927b1629b..d9698b617 100644
--- a/met-cron/src/met_cron/utils/subscription_checker.py
+++ b/met-cron/src/met_cron/utils/subscription_checker.py
@@ -17,7 +17,7 @@
 A simple decorator to check the subscription for a user.
 """
 from met_api.constants.subscription_type import SubscriptionType
-from met_api.models.engagement_metadata import EngagementMetadataModel
+# from met_api.models.engagement_metadata import EngagementMetadataModel
 
 
 class CheckSubscription:
     """Template helper class."""
@@ -34,8 +34,11 @@ def check_subscription(subscriber, engagement_id):
         if subscriber.type == SubscriptionType.TENANT.value:
             return True
         elif subscriber.type == SubscriptionType.PROJECT.value:
-            engagement_metadata: EngagementMetadataModel = EngagementMetadataModel.find_by_id(engagement_id)
-            if subscriber.project_id == engagement_metadata.project_id:
+            # TODO should be re-visited once the engagement metadata functionality is completed
+            # engagement_metadata: EngagementMetadataModel = EngagementMetadataModel.find_by_id(engagement_id)
+            # if subscriber.project_id == engagement_metadata.project_id:
+            #     return True
+            if subscriber.engagement_id == engagement_id:
                 return True
         elif subscriber.type == SubscriptionType.ENGAGEMENT.value:
             if subscriber.engagement_id == engagement_id:
                 return True
From 23faaae48701f1fdf73d3dfcbebbf59a87ab8f9c Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <108045773+ratheesh-aot@users.noreply.github.com>
Date: Wed, 7 Feb 2024 11:52:24 -0800
Subject: [PATCH 02/42] DESENG-447 : Convert keycloak groups to composite roles for permission levels (#2376)

* DESENG-447 Removed references to EAO in groups, remove group check for AuthGate

* DESENG-447 Remove or comment out references to groups

* DESENG-447: Commented out checks related to groups

* DESENG-447: Fixing linting issues and unit test

* Updated Changelog

* Removed console.log statements

* Removed console.log

---------

Co-authored-by: Alex
---
 CHANGELOG.MD                                  |   6 +
 met-api/.DS_Store                             | Bin 6148 -> 6148 bytes
 met-api/src/met_api/models/membership.py      |   2 +-
 .../met_api/resources/engagement_members.py   |  23 +-
 met-api/src/met_api/resources/staff_user.py   |  42 +-
 met-api/src/met_api/services/authorization.py |   6 +-
 met-api/src/met_api/services/keycloak.py      | 276 ++++++------
 .../met_api/services/membership_service.py    | 141 +++---
 .../services/staff_user_membership_service.py |  29 +-
 .../met_api/services/staff_user_service.py    |  86 ++--
 .../src/met_api/services/tenant_service.py    |   2 +-
 met-api/src/met_api/utils/constants.py        |  32 +-
 met-api/src/met_api/utils/enums.py            |  24 +-
 .../unit/api/test_engagement_membership.py    | 284 ++++++------
 met-api/tests/unit/api/test_user.py           | 414 ++++++++++--------
 .../tests/unit/api/test_user_membership.py    | 163 +++----
 met-api/tests/unit/services/test_keycloak.py  |  30 +-
 .../UserManagement/AddTeamMemberModal.tsx     |   2 +-
 .../listing/UserManagementContext.tsx         |   2 +-
 .../userDetails/UserDetailsContext.tsx        |   2 +-
 met-web/src/models/user.ts                    |  10 +-
 met-web/src/routes/AuthGate.tsx               |   3 +-
 .../src/services/userService/api/index.tsx    |   4 +-
 23 files changed, 793 insertions(+), 790 deletions(-)

diff --git a/CHANGELOG.MD b/CHANGELOG.MD
index 30f93e860..80c4e5899 100644
--- a/CHANGELOG.MD
+++ b/CHANGELOG.MD
@@ -1,3 +1,9 @@
+## February 06, 2024
+- **Task** Convert keycloak groups to composite roles for permission levels [DESENG-447](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-447)
+  - Commented out unit test related to Keycloak groups
+  - Changed reference of Keycloak `groups` to `roles`
+  - Commented out code related to Keycloak groups
+
 ## February 06, 2024
 - **Task** Streamline CRON jobs [DESENG-493](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-493)
   - Aligned the CRON configuration and sample environment files with the structure used in the Met API.
diff --git a/met-api/.DS_Store b/met-api/.DS_Store index f21bee2e693e8d55ffb72c2c40a29d85d54da0f4..9b9604455f160d33a60e78e5ac0060f028193a31 100644 GIT binary patch delta 81 zcmZoMXfc=|#>CJ*u~2NHo}wrd0|Nsi1A_nqgD-B`mu~2NHo}w@_0|Nsi1A_oVesWSyeiD!;Fqx5Yr8-EQnZc2vfT6%M y2O$R(bp&F)|6sttu(9VV+h%qSeh#3Cn;99uGf(ChG2{Rm$j|`9n>|GKFarR&CmPNG diff --git a/met-api/src/met_api/models/membership.py b/met-api/src/met_api/models/membership.py index 812f58d90..37a2b0167 100644 --- a/met-api/src/met_api/models/membership.py +++ b/met-api/src/met_api/models/membership.py @@ -93,7 +93,7 @@ def find_by_user_id( @classmethod def find_by_engagement_and_user_id(cls, eng_id, userid, status=None) \ -> Membership: - """Get a survey.""" + """Get a membership by engagement and user ID.""" query = db.session.query(Membership) \ .join(StaffUser, StaffUser.id == Membership.user_id) \ .filter(and_(Membership.engagement_id == eng_id, diff --git a/met-api/src/met_api/resources/engagement_members.py b/met-api/src/met_api/resources/engagement_members.py index c827568a8..4939c76fc 100644 --- a/met-api/src/met_api/resources/engagement_members.py +++ b/met-api/src/met_api/resources/engagement_members.py @@ -48,17 +48,18 @@ def get(engagement_id): except BusinessException as err: return {'message': err.error}, err.status_code - @staticmethod - @cross_origin(origins=allowedorigins()) - @_jwt.requires_auth - def post(engagement_id): - """Create a new membership.""" - # TODO validate against a schema. - try: - member = MembershipService.create_membership(engagement_id, request.get_json()) - return MembershipSchema().dump(member), HTTPStatus.OK - except BusinessException as err: - return {'message': err.error}, err.status_code + # TODO: Create membership method that uses composite roles + # @staticmethod + # @cross_origin(origins=allowedorigins()) + # @_jwt.requires_auth + # def post(engagement_id): + # """Create a new membership.""" + # # TODO validate against a schema. 
+ # try: + # member = MembershipService.create_membership(engagement_id, request.get_json()) + # return MembershipSchema().dump(member), HTTPStatus.OK + # except BusinessException as err: + # return {'message': err.error}, err.status_code @cors_preflight('GET,OPTIONS') diff --git a/met-api/src/met_api/resources/staff_user.py b/met-api/src/met_api/resources/staff_user.py index a4947bf30..bacaeafbf 100644 --- a/met-api/src/met_api/resources/staff_user.py +++ b/met-api/src/met_api/resources/staff_user.py @@ -72,7 +72,7 @@ def get(): users = StaffUserService.find_users( pagination_options=pagination_options, search_text=args.get('search_text', '', str), - include_groups=args.get('include_groups', default=False, type=lambda v: v.lower() == 'true'), + include_roles=args.get('include_roles', default=False, type=lambda v: v.lower() == 'true'), include_inactive=args.get('include_inactive', default=False, type=lambda v: v.lower() == 'true') ) return jsonify(users), HTTPStatus.OK @@ -91,7 +91,7 @@ def get(user_id): args = request.args user = StaffUserService.get_user_by_id( user_id, - include_groups=args.get('include_groups', default=False, type=lambda v: v.lower() == 'true'), + include_roles=args.get('include_roles', default=False, type=lambda v: v.lower() == 'true'), include_inactive=True, ) return user, HTTPStatus.OK @@ -121,44 +121,6 @@ def patch(user_id): return str(err), HTTPStatus.BAD_REQUEST -@cors_preflight('POST, PUT') -@API.route('//groups') -class UserGroup(Resource): - """Add user to group.""" - - @staticmethod - @cross_origin(origins=allowedorigins()) - @require_role([Role.CREATE_ADMIN_USER.value], skip_tenant_check_for_admin=True) - def post(user_id): - """Add user to group.""" - try: - args = request.args - user_schema = StaffUserService().add_user_to_group(user_id, args.get('group')) - return user_schema, HTTPStatus.OK - except KeyError as err: - return str(err), HTTPStatus.INTERNAL_SERVER_ERROR - except ValueError as err: - return str(err), HTTPStatus.INTERNAL_SERVER_ERROR - except BusinessException as err: - return {'message': err.error}, err.status_code - - @staticmethod - @cross_origin(origins=allowedorigins()) - @_jwt.has_one_of_roles([Role.UPDATE_USER_GROUP.value]) - def put(user_id): - """Update user group.""" - try: - args = request.args - user_schema = StaffUserMembershipService().reassign_user(user_id, args.get('group')) - return user_schema, HTTPStatus.OK - except KeyError as err: - return str(err), HTTPStatus.INTERNAL_SERVER_ERROR - except ValueError as err: - return str(err), HTTPStatus.INTERNAL_SERVER_ERROR - except BusinessException as err: - return {'message': err.error}, err.status_code - - @cors_preflight('GET,OPTIONS') @API.route('//engagements') class EngagementMemberships(Resource): diff --git a/met-api/src/met_api/services/authorization.py b/met-api/src/met_api/services/authorization.py index e3d1862be..bce03218c 100644 --- a/met-api/src/met_api/services/authorization.py +++ b/met-api/src/met_api/services/authorization.py @@ -31,14 +31,15 @@ def check_auth(**kwargs): has_valid_roles = token_roles & permitted_roles if has_valid_roles: if not skip_tenant_check: + user_tenant_id = user_from_db.tenant_id _validate_tenant(kwargs.get('engagement_id'), user_tenant_id) return - team_permitted_roles = {MembershipType.TEAM_MEMBER.name, MembershipType.REVIEWER.name} & permitted_roles if team_permitted_roles: # check if he is a member of particular engagement. 
+ has_valid_team_access = _has_team_membership(kwargs, user_from_context, team_permitted_roles) if has_valid_team_access: return @@ -63,16 +64,19 @@ def _has_team_membership(kwargs, user_from_context, team_permitted_roles) -> boo eng_id = kwargs.get('engagement_id') if not eng_id: + return False user = StaffUserModel.get_user_by_external_id(user_from_context.sub) if not user: + return False membership = MembershipModel.find_by_engagement_and_user_id(eng_id, user.id, status=MembershipStatus.ACTIVE.value) if not membership: + return False skip_tenant_check = current_app.config.get('IS_SINGLE_TENANT_ENVIRONMENT') diff --git a/met-api/src/met_api/services/keycloak.py b/met-api/src/met_api/services/keycloak.py index 0e4bf3fe4..7ab9e0305 100644 --- a/met-api/src/met_api/services/keycloak.py +++ b/met-api/src/met_api/services/keycloak.py @@ -14,7 +14,6 @@ """Utils for keycloak administration.""" import json -from typing import List import requests from flask import current_app @@ -25,80 +24,80 @@ class KeycloakService: # pylint: disable=too-few-public-methods """Keycloak services.""" - @staticmethod - def get_user_groups(user_id): - """Get user group from Keycloak by userid.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - timeout = keycloak['CONNECT_TIMEOUT'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - - # Get the user and return - query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups' - response = requests.get(query_user_url, headers=headers, timeout=timeout) - response.raise_for_status() - return response.json() - - @staticmethod - def get_users_groups(user_ids: List): - """Get user groups from Keycloak by user ids.For bulk purposes.""" - # TODO if List is bigger than a number ; if so reject. 
- keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - # TODO fix this during tests and remove below - if not base_url: - return {} - keycloak = current_app.config['KEYCLOAK_CONFIG'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - user_group_mapping = {} - # Get the user and return - for user_id in user_ids: - query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups' - response = requests.get(query_user_url, headers=headers, timeout=timeout) - - if response.status_code == 200: - if (groups := response.json()) is not None: - user_group_mapping[user_id] = [group.get('name') for group in groups] - else: - user_group_mapping[user_id] = [] - - return user_group_mapping - - @staticmethod - def _get_group_id(admin_token: str, group_name: str): - """Get a group id for the group name.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - get_group_url = f'{base_url}/admin/realms/{realm}/groups?search={group_name}' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - response = requests.get(get_group_url, headers=headers, timeout=timeout) - return KeycloakService._find_group_or_subgroup_id(response.json(), group_name) - - @staticmethod - def _find_group_or_subgroup_id(groups: list, group_name: str): - """Return group id by searching main and sub groups.""" - for group in groups: - if group['name'] == group_name: - return group['id'] - if group_id := KeycloakService._find_group_or_subgroup_id(group['subGroups'], group_name): - return group_id - return None + # @staticmethod + # def get_user_groups(user_id): + # """Get user group from Keycloak by userid.""" + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # base_url = keycloak['BASE_URL'] + # realm = keycloak['REALMNAME'] + # admin_token = KeycloakService._get_admin_token() + # headers = { + # 'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + + # # Get the user and return + # query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups' + # response = requests.get(query_user_url, headers=headers, timeout=timeout) + # response.raise_for_status() + # return response.json() + + # @staticmethod + # def get_users_groups(user_ids: List): + # """Get user groups from Keycloak by user ids.For bulk purposes.""" + # # TODO if List is bigger than a number ; if so reject. 
+ # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # base_url = keycloak['BASE_URL'] + # # TODO fix this during tests and remove below + # if not base_url: + # return {} + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # realm = keycloak['REALMNAME'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # admin_token = KeycloakService._get_admin_token() + # headers = { + # 'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + # user_group_mapping = {} + # # Get the user and return + # for user_id in user_ids: + # query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups' + # response = requests.get(query_user_url, headers=headers, timeout=timeout) + + # if response.status_code == 200: + # if (groups := response.json()) is not None: + # user_group_mapping[user_id] = [group.get('name') for group in groups] + # else: + # user_group_mapping[user_id] = [] + + # return user_group_mapping + + # @staticmethod + # def _get_group_id(admin_token: str, group_name: str): + # """Get a group id for the group name.""" + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # base_url = keycloak['BASE_URL'] + # realm = keycloak['REALMNAME'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # get_group_url = f'{base_url}/admin/realms/{realm}/groups?search={group_name}' + # headers = { + # 'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + # response = requests.get(get_group_url, headers=headers, timeout=timeout) + # return KeycloakService._find_group_or_subgroup_id(response.json(), group_name) + + # @staticmethod + # def _find_group_or_subgroup_id(groups: list, group_name: str): + # """Return group id by searching main and sub groups.""" + # for group in groups: + # if group['name'] == group_name: + # return group['id'] + # if group_id := KeycloakService._find_group_or_subgroup_id(group['subGroups'], group_name): + # return group_id + # return None @staticmethod def _get_admin_token(): @@ -122,49 +121,49 @@ def _get_admin_token(): ) return response.json().get('access_token') - @staticmethod - def _remove_user_from_group(user_id: str, group_name: str): - """Remove user from the keycloak group.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - # Create an admin token - admin_token = KeycloakService._get_admin_token() - # Get the '$group_name' group - group_id = KeycloakService._get_group_id(admin_token, group_name) - - # Add user to the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - remove_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - response = requests.delete(remove_group_url, headers=headers, - timeout=timeout) - response.raise_for_status() - - @staticmethod - def add_user_to_group(user_id: str, group_name: str): - """Add user to the keycloak group.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - # Create an admin token - admin_token = KeycloakService._get_admin_token() - # Get the '$group_name' group - group_id = KeycloakService._get_group_id(admin_token, group_name) - - # Add user to the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - add_to_group_url = 
f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - response = requests.put(add_to_group_url, headers=headers, - timeout=timeout) - response.raise_for_status() + # @staticmethod + # def _remove_user_from_group(user_id: str, group_name: str): + # """Remove user from the keycloak group.""" + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # base_url = keycloak['BASE_URL'] + # realm = keycloak['REALMNAME'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # # Create an admin token + # admin_token = KeycloakService._get_admin_token() + # # Get the '$group_name' group + # group_id = KeycloakService._get_group_id(admin_token, group_name) + + # # Add user to the keycloak group '$group_name' + # headers = { + # 'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + # remove_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' + # response = requests.delete(remove_group_url, headers=headers, + # timeout=timeout) + # response.raise_for_status() + + # @staticmethod + # def add_user_to_group(user_id: str, group_name: str): + # """Add user to the keycloak group.""" + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # base_url = keycloak['BASE_URL'] + # realm = keycloak['REALMNAME'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # # Create an admin token + # admin_token = KeycloakService._get_admin_token() + # # Get the '$group_name' group + # group_id = KeycloakService._get_group_id(admin_token, group_name) + + # # Add user to the keycloak group '$group_name' + # headers = { + # 'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + # add_to_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' + # response = requests.put(add_to_group_url, headers=headers, + # timeout=timeout) + # response.raise_for_status() @staticmethod def add_attribute_to_user(user_id: str, attribute_value: str, attribute_id: str = 'tenant_id'): @@ -186,26 +185,26 @@ def add_attribute_to_user(user_id: str, attribute_value: str, attribute_id: str requests.put(user_url, json=user_data, headers=headers) response.raise_for_status() - @staticmethod - def remove_user_from_group(user_id: str, group_name: str): - """Remove user from the keycloak group.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - # Create an admin token - admin_token = KeycloakService._get_admin_token() - # Get the '$group_name' group - group_id = KeycloakService._get_group_id(admin_token, group_name) - - # Remove user from the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - remove_from_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - response = requests.delete(remove_from_group_url, headers=headers, timeout=timeout) - response.raise_for_status() + # @staticmethod + # def remove_user_from_group(user_id: str, group_name: str): + # """Remove user from the keycloak group.""" + # keycloak = current_app.config['KEYCLOAK_CONFIG'] + # base_url = keycloak['BASE_URL'] + # realm = keycloak['REALMNAME'] + # timeout = keycloak['CONNECT_TIMEOUT'] + # # Create an admin token + # admin_token = KeycloakService._get_admin_token() + # # Get the '$group_name' group + # group_id = KeycloakService._get_group_id(admin_token, group_name) + + # # Remove user from the keycloak group '$group_name' + # headers = { + # 
'Content-Type': ContentType.JSON.value, + # 'Authorization': f'Bearer {admin_token}' + # } + # remove_from_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' + # response = requests.delete(remove_from_group_url, headers=headers, timeout=timeout) + # response.raise_for_status() @staticmethod def add_user(user: dict): @@ -217,7 +216,6 @@ def add_user(user: dict): realm = keycloak['REALMNAME'] timeout = keycloak['CONNECT_TIMEOUT'] - # Add user to the keycloak group '$group_name' headers = { 'Content-Type': ContentType.JSON.value, 'Authorization': f'Bearer {admin_token}' diff --git a/met-api/src/met_api/services/membership_service.py b/met-api/src/met_api/services/membership_service.py index d8015d4b1..5eb6401b6 100644 --- a/met-api/src/met_api/services/membership_service.py +++ b/met-api/src/met_api/services/membership_service.py @@ -4,14 +4,10 @@ from met_api.constants.membership_type import MembershipType from met_api.exceptions.business_exception import BusinessException -from met_api.models import StaffUser as StaffUserModel from met_api.models.engagement import Engagement as EngagementModel from met_api.models.membership import Membership as MembershipModel -from met_api.schemas.staff_user import StaffUserSchema from met_api.services import authorization -from met_api.services.staff_user_service import KEYCLOAK_SERVICE, StaffUserService -from met_api.utils.constants import Groups -from met_api.utils.enums import KeycloakGroups, MembershipStatus +from met_api.utils.enums import MembershipStatus from met_api.utils.roles import Role from met_api.utils.token_info import TokenInfo @@ -19,30 +15,30 @@ class MembershipService: """Membership management service.""" - @staticmethod - def create_membership(engagement_id, request_json: dict): - """Create membership.""" - user_id = request_json.get('user_id') - user: StaffUserModel = StaffUserModel.get_user_by_external_id(user_id) - if not user: - raise BusinessException( - error='Invalid User.', - status_code=HTTPStatus.BAD_REQUEST) - - one_of_roles = ( - MembershipType.TEAM_MEMBER.name, - Role.EDIT_MEMBERS.value - ) - authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) - - user_details = StaffUserSchema().dump(user) - # attach and map groups - StaffUserService.attach_groups([user_details]) - MembershipService._validate_create_membership(engagement_id, user_details) - group_name, membership_type = MembershipService._get_membership_details(user_details) - MembershipService._add_user_group(user_details, group_name) - membership = MembershipService._create_membership_model(engagement_id, user.id, membership_type) - return membership + # TODO: Create membership method that uses composite roles + # @staticmethod + # def create_membership(engagement_id, request_json: dict): + # """Create membership.""" + # user_id = request_json.get('user_id') + # user: StaffUserModel = StaffUserModel.get_user_by_external_id(user_id) + # if not user: + # raise BusinessException( + # error='Invalid User.', + # status_code=HTTPStatus.BAD_REQUEST) + + # one_of_roles = ( + # MembershipType.TEAM_MEMBER.name, + # Role.EDIT_MEMBERS.value + # ) + # authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) + + # user_details = StaffUserSchema().dump(user) + + # MembershipService._validate_create_membership(engagement_id, user_details) + # group_name, membership_type = MembershipService._get_membership_details(user_details) + # MembershipService._add_user_group(user_details, group_name) + # membership 
= MembershipService._create_membership_model(engagement_id, user.id, membership_type) + # return membership @staticmethod def _validate_create_membership(engagement_id, user_details): @@ -55,11 +51,12 @@ def _validate_create_membership(engagement_id, user_details): user_id = user_details.get('id') - groups = user_details.get('groups') - if KeycloakGroups.EAO_IT_ADMIN.value in groups: - raise BusinessException( - error='This user is already an Administrator.', - status_code=HTTPStatus.CONFLICT.value) + # TODO: Check for permission level once composite role permission levels are added. + # roles = user_details.get('roles') + # if KeycloakPermissionLevels.IT_ADMIN.value in roles: + # raise BusinessException( + # error='This user is already a Administrator.', + # status_code=HTTPStatus.CONFLICT.value) existing_membership = MembershipModel.find_by_engagement_and_user_id( engagement_id, @@ -78,43 +75,45 @@ def _validate_create_membership(engagement_id, user_details): error='You cannot add yourself to an engagement.', status_code=HTTPStatus.FORBIDDEN.value) - @staticmethod - def _get_membership_details(user_details): - """Get the group name and membership type for the user based on their assigned groups.""" - default_group_name = Groups.EAO_TEAM_MEMBER.name - default_membership_type = MembershipType.TEAM_MEMBER - - is_reviewer = Groups.EAO_REVIEWER.value in user_details.get('groups') - is_team_member = Groups.EAO_TEAM_MEMBER.value in user_details.get('groups') - - if is_reviewer: - # If the user is assigned to the EAO_REVIEWER group, set the group name and membership type accordingly - group_name = Groups.EAO_REVIEWER.name - membership_type = MembershipType.REVIEWER - elif is_team_member: - # If the user is assigned to the EAO_TEAM_MEMBER group, set the group name and membership type accordingly - group_name = Groups.EAO_TEAM_MEMBER.name - membership_type = MembershipType.TEAM_MEMBER - else: - # If the user is not assigned to either group, return default values for group name and membership type - group_name = default_group_name - membership_type = default_membership_type - - return group_name, membership_type - - @staticmethod - def _add_user_group(user: StaffUserModel, group_name=Groups.EAO_TEAM_MEMBER.name): - valid_member_teams = [Groups.EAO_TEAM_MEMBER.name, Groups.EAO_REVIEWER.name] - if group_name not in valid_member_teams: - raise BusinessException( - error='Invalid Group name.', - status_code=HTTPStatus.BAD_REQUEST - ) - - KEYCLOAK_SERVICE.add_user_to_group( - user_id=user.get('external_id'), - group_name=group_name - ) + # TODO: Replace this method with one that checks membership type with composite roles + # @staticmethod + # def _get_membership_details(user_details): + # """Get the group name and membership type for the user based on their assigned groups.""" + # default_group_name = Groups.TEAM_MEMBER.name + # default_membership_type = MembershipType.TEAM_MEMBER + + # is_reviewer = Groups.REVIEWER.value in user_details.get('groups') + # is_team_member = Groups.TEAM_MEMBER.value in user_details.get('groups') + + # if is_reviewer: + # # If the user is assigned to the REVIEWER group, set the group name and membership type accordingly + # group_name = Groups.REVIEWER.name + # membership_type = MembershipType.REVIEWER + # elif is_team_member: + # # If the user is assigned to the TEAM_MEMBER group, set the group name and membership type accordingly + # group_name = Groups.TEAM_MEMBER.name + # membership_type = MembershipType.TEAM_MEMBER + # else: + # # If the user is not assigned to either 
group, return default values for group name and membership type + # group_name = default_group_name + # membership_type = default_membership_type + + # return group_name, membership_type + + # TODO: Replace this method with a method to add composite roles + # @staticmethod + # def _add_user_group(user: StaffUserModel, group_name=Groups.TEAM_MEMBER.name): + # valid_member_teams = [Groups.TEAM_MEMBER.name, Groups.REVIEWER.name] + # if group_name not in valid_member_teams: + # raise BusinessException( + # error='Invalid Group name.', + # status_code=HTTPStatus.BAD_REQUEST + # ) + + # KEYCLOAK_SERVICE.add_user_to_group( + # user_id=user.get('external_id'), + # group_name=group_name + # ) @staticmethod def _create_membership_model(engagement_id, user_id, membership_type=MembershipType.TEAM_MEMBER): diff --git a/met-api/src/met_api/services/staff_user_membership_service.py b/met-api/src/met_api/services/staff_user_membership_service.py index ebe470f06..e698e58a7 100644 --- a/met-api/src/met_api/services/staff_user_membership_service.py +++ b/met-api/src/met_api/services/staff_user_membership_service.py @@ -7,51 +7,40 @@ from met_api.services.membership_service import MembershipService from met_api.services.staff_user_service import KEYCLOAK_SERVICE, StaffUserService from met_api.utils.user_context import UserContext, user_context -from met_api.utils.constants import Groups from met_api.utils.enums import UserStatus class StaffUserMembershipService: """Staff User Membership management service.""" + # TODO: Restore a way to add users to composite roles. @classmethod @user_context - def reassign_user(cls, user_id, group_name, **kwargs): - """Add user to a new group and reassign memberships.""" - user = StaffUserService.get_user_by_id(user_id, include_groups=True) + def reassign_user(cls, user_id, **kwargs): + """Add user to a new composite role and reassign memberships.""" + user = StaffUserService.get_user_by_id(user_id, include_roles=True) if not user: raise BusinessException( error='Invalid User.', status_code=HTTPStatus.BAD_REQUEST) external_id = user.get('external_id', None) - main_group = user.get('main_group', None) - if any([not external_id, not main_group]): + # TODO: Put check for composite role membership into this conditional. 
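+        # One possible shape for that check, sketched here in the same spirit as the
+        # commented-out validation in MembershipService. It assumes the reinstated
+        # attach_roles() will expose the user's composite roles under a 'roles' key;
+        # that key name is an assumption, not part of this change.
+        # roles = user.get('roles') or []
+        # if not external_id or not roles:
+        #     raise BusinessException(
+        #         error='Invalid User.',
+        #         status_code=HTTPStatus.BAD_REQUEST)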
+ if not external_id: raise BusinessException( error='Invalid User.', status_code=HTTPStatus.BAD_REQUEST) - if group_name not in Groups.__members__: - raise BusinessException( - error='Invalid Group.', - status_code=HTTPStatus.BAD_REQUEST) - - if main_group == group_name: - raise BusinessException( - error='User is already a member of this group.', - status_code=HTTPStatus.BAD_REQUEST) - user_from_context: UserContext = kwargs['user_context'] + if external_id == user_from_context.sub: raise BusinessException( - error='User cannot change their own group.', + error='User cannot change their own permission level.', status_code=HTTPStatus.CONFLICT.value) - StaffUserService.remove_user_from_group(external_id, Groups.get_name_by_value(main_group)) - StaffUserService.add_user_to_group(external_id, group_name) MembershipService.revoke_memberships_bulk(user_id) - new_user = StaffUserService.get_user_by_id(user_id, include_groups=True) + new_user = StaffUserService.get_user_by_id(user_id, include_roles=True) return StaffUserSchema().dump(new_user) @staticmethod diff --git a/met-api/src/met_api/services/staff_user_service.py b/met-api/src/met_api/services/staff_user_service.py index e7f7ba83b..4092a6f9b 100644 --- a/met-api/src/met_api/services/staff_user_service.py +++ b/met-api/src/met_api/services/staff_user_service.py @@ -9,8 +9,6 @@ from met_api.schemas.staff_user import StaffUserSchema from met_api.services.keycloak import KeycloakService from met_api.utils import notification -from met_api.utils.constants import GROUP_NAME_MAPPING, Groups -from met_api.utils.enums import KeycloakGroupName from met_api.utils.template import Template KEYCLOAK_SERVICE = KeycloakService() @@ -20,13 +18,15 @@ class StaffUserService: """User management service.""" @classmethod - def get_user_by_id(cls, _user_id, include_groups=False, include_inactive=False): + def get_user_by_id(cls, _user_id, include_roles=False, include_inactive=False): """Get user by id.""" user_schema = StaffUserSchema() db_user = StaffUserModel.get_by_id(_user_id, include_inactive) user = user_schema.dump(db_user) - if include_groups: - cls.attach_groups([user]) + if include_roles: + # TODO: Replace this method with one that uses composite roles + # cls.attach_roles([user]) + pass return user @classmethod @@ -100,42 +100,47 @@ def _render_email_template(user: StaffUserModel): ) return subject, body, args - @staticmethod - def attach_groups(user_collection): - """Attach keycloak groups to user object.""" - group_user_details = KEYCLOAK_SERVICE.get_users_groups( - [user.get('external_id') for user in user_collection]) - - for user in user_collection: - # Transform group name from EAO_ADMINISTRATOR to Administrator - # TODO etc;Arrive at a better implementation than keeping a static list - # TODO Probably add a custom attribute in the keycloak as title against a group? 
- groups = group_user_details.get(user.get('external_id')) - user['groups'] = '' - if groups: - user['groups'] = [GROUP_NAME_MAPPING.get(group, '') for group in groups] - if Groups.EAO_IT_ADMIN.value in user['groups']: - user['main_group'] = Groups.EAO_IT_ADMIN.value - elif Groups.EAO_TEAM_MEMBER.value in user['groups']: - user['main_group'] = Groups.EAO_TEAM_MEMBER.value - elif Groups.EAO_REVIEWER.value in user['groups']: - user['main_group'] = Groups.EAO_REVIEWER.value - else: - user['main_group'] = user['groups'][0] + # TODO: Replace this method with one that uses composite roles, if necessary + # @staticmethod + # def attach_roles(user_collection): + # """Attach keycloak groups to user object.""" + # group_user_details = KEYCLOAK_SERVICE.get_users_groups( + # [user.get('external_id') for user in user_collection]) + + # for user in user_collection: + # # Transform group name from ADMINISTRATOR to Administrator + # # TODO etc;Arrive at a better implementation than keeping a static list + # # TODO Probably add a custom attribute in the keycloak as title against a group? + # groups = group_user_details.get(user.get('external_id')) + # user['groups'] = '' + # if groups: + # user['groups'] = [GROUP_NAME_MAPPING.get(group, '') for group in groups] + # if Groups.IT_ADMIN.value in user['groups']: + # user['main_group'] = Groups.IT_ADMIN.value + # elif Groups.TEAM_MEMBER.value in user['groups']: + # user['main_group'] = Groups.TEAM_MEMBER.value + # elif Groups.REVIEWER.value in user['groups']: + # user['main_group'] = Groups.REVIEWER.value + # else: + # user['main_group'] = user['groups'][0] @classmethod def find_users( cls, pagination_options: PaginationOptions = None, search_text='', - include_groups=False, + include_roles=False, include_inactive=False ): """Return a list of users.""" users, total = StaffUserModel.get_all_paginated(pagination_options, search_text, include_inactive) user_collection = StaffUserSchema(many=True).dump(users) - if include_groups: - cls.attach_groups(user_collection) + + if include_roles: + # TODO: Replace this method with one that uses composite roles + # cls.attach_roles(user_collection) + pass + return { 'items': user_collection, 'total': total @@ -161,7 +166,9 @@ def add_user_to_group(cls, external_id: str, group_name: str): cls.validate_user(db_user) - KEYCLOAK_SERVICE.add_user_to_group(user_id=external_id, group_name=group_name) + # TODO: Replace this method with one that uses composite roles + print(group_name) + # KEYCLOAK_SERVICE.add_user_to_group(user_id=external_id, group_name=group_name) KEYCLOAK_SERVICE.add_attribute_to_user(user_id=external_id, attribute_value=g.tenant_id) return StaffUserSchema().dump(db_user) @@ -174,7 +181,9 @@ def remove_user_from_group(cls, external_id: str, group_name: str): if db_user is None: raise KeyError('User not found') - KEYCLOAK_SERVICE.remove_user_from_group(user_id=external_id, group_name=group_name) + # TODO: Replace this method with one that uses composite roles + print(group_name) + # KEYCLOAK_SERVICE.remove_user_from_group(user_id=external_id, group_name=group_name) return StaffUserSchema().dump(db_user) @@ -184,9 +193,10 @@ def validate_user(db_user: StaffUserModel): if db_user is None: raise KeyError('User not found') - groups = KEYCLOAK_SERVICE.get_user_groups(user_id=db_user.external_id) - group_names = [group.get('name') for group in groups] - if KeycloakGroupName.EAO_IT_ADMIN.value in group_names: - raise BusinessException( - error='This user is already an Administrator.', - 
status_code=HTTPStatus.CONFLICT.value) + # TODO: Restore permission level functionality to replace "groups" later + # groups = KEYCLOAK_SERVICE.get_user_groups(user_id=db_user.external_id) + # group_names = [group.get('name') for group in groups] + # if KeycloakGroupName.IT_ADMIN.value in group_names: + # raise BusinessException( + # error='This user is already an Administrator.', + # status_code=HTTPStatus.CONFLICT.value) diff --git a/met-api/src/met_api/services/tenant_service.py b/met-api/src/met_api/services/tenant_service.py index 2992e79f8..0797bee97 100644 --- a/met-api/src/met_api/services/tenant_service.py +++ b/met-api/src/met_api/services/tenant_service.py @@ -27,5 +27,5 @@ def get(cls, tenant_id): """Get a tenant by id.""" tenant = TenantModel.find_by_short_name(tenant_id) if not tenant: - raise ValueError('Tenant not found.') + raise ValueError('Tenant not found.', cls, tenant_id) return TenantSchema().dump(tenant) diff --git a/met-api/src/met_api/utils/constants.py b/met-api/src/met_api/utils/constants.py index 07c82c28b..5a7202c87 100644 --- a/met-api/src/met_api/utils/constants.py +++ b/met-api/src/met_api/utils/constants.py @@ -13,30 +13,28 @@ # limitations under the License. """Constants definitions.""" -from enum import Enum +# from enum import Enum +# TODO Remove this +# class Groups(Enum): +# """Enumeration representing user groups.""" -class Groups(Enum): - """Enumeration representing user groups.""" +# IT_ADMIN = 'Administrator' +# TEAM_MEMBER = 'Team Member' +# REVIEWER = 'Reviewer' +# IT_VIEWER = 'Viewer' - EAO_IT_ADMIN = 'Administrator' - EAO_TEAM_MEMBER = 'Team Member' - EAO_REVIEWER = 'Reviewer' - EAO_IT_VIEWER = 'Viewer' - - @staticmethod - def get_name_by_value(value): - """Get the name of a group by its value.""" - for group in Groups: - if group.value == value: - return group.name - raise ValueError('No matching key found for the given value.') +# @staticmethod +# def get_name_by_value(value): +# """Get the name of a group by its value.""" +# for group in Groups: +# if group.value == value: +# return group.name +# raise ValueError('No matching key found for the given value.') TENANT_ID_HEADER = 'tenant-id' -GROUP_NAME_MAPPING = {group.name: group.value for group in Groups} - TENANT_ID_JWT_CLAIM = 'tenant_id' diff --git a/met-api/src/met_api/utils/enums.py b/met-api/src/met_api/utils/enums.py index 93fe76cd7..64de3e805 100644 --- a/met-api/src/met_api/utils/enums.py +++ b/met-api/src/met_api/utils/enums.py @@ -59,22 +59,22 @@ class LoginSource(Enum): IDIR = 'idir' -class KeycloakGroups(Enum): - """Login Source.""" +class KeycloakPermissionLevels(Enum): + """Keycloak permission levels.""" - EAO_IT_ADMIN = 'Administrator' - EAO_IT_VIEWER = 'Viewer' - EAO_TEAM_MEMBER = 'Member' - EAO_REVIEWER = 'Reviewer' + IT_ADMIN = 'Administrator' + IT_VIEWER = 'Viewer' + TEAM_MEMBER = 'Member' + REVIEWER = 'Reviewer' -class KeycloakGroupName(Enum): - """Keycloak group names.""" +class KeycloakCompositeRoleNames(Enum): + """Keycloak composite role names.""" - EAO_IT_ADMIN = 'EAO_IT_ADMIN' - EAO_IT_VIEWER = 'EAO_IT_VIEWER' - EAO_TEAM_MEMBER = 'EAO_TEAM_MEMBER' - EAO_REVIEWER = 'EAO_REVIEWER' + IT_ADMIN = 'IT_ADMIN' + IT_VIEWER = 'IT_VIEWER' + TEAM_MEMBER = 'TEAM_MEMBER' + REVIEWER = 'REVIEWER' class MembershipType(IntEnum): diff --git a/met-api/tests/unit/api/test_engagement_membership.py b/met-api/tests/unit/api/test_engagement_membership.py index 711ab3bd8..2c29dd40b 100644 --- a/met-api/tests/unit/api/test_engagement_membership.py +++ 
b/met-api/tests/unit/api/test_engagement_membership.py @@ -5,118 +5,116 @@ """ import json from http import HTTPStatus -from unittest.mock import MagicMock, patch -import pytest +from unittest.mock import patch -from met_api.constants.membership_type import MembershipType from met_api.exceptions.business_exception import BusinessException from met_api.services.membership_service import MembershipService -from met_api.utils.enums import ContentType, KeycloakGroupName, MembershipStatus +from met_api.utils.enums import ContentType, MembershipStatus from tests.utilities.factory_utils import ( factory_auth_header, factory_engagement_model, factory_membership_model, factory_staff_user_model) memberships_url = '/api/engagements/{}/members' - -def test_create_engagement_membership_team_member(mocker, client, jwt, session, - setup_admin_user_and_claims): - """Assert that a team member engagement membership can be created.""" - user, claims = setup_admin_user_and_claims - engagement = factory_engagement_model() - staff_user = factory_staff_user_model() - headers = factory_auth_header(jwt=jwt, claims=claims) - - mock_add_user_to_group_keycloak_response = MagicMock() - mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT - mock_add_user_to_group_keycloak = mocker.patch( - 'met_api.services.keycloak.KeycloakService.add_user_to_group', - return_value=mock_add_user_to_group_keycloak_response - ) - mock_get_users_groups_keycloak = mocker.patch( - 'met_api.services.keycloak.KeycloakService.get_users_groups', - return_value={staff_user.external_id: [KeycloakGroupName.EAO_TEAM_MEMBER.value]} - ) - - data = {'user_id': staff_user.external_id} - - rv = client.post( - memberships_url.format(engagement.id), - data=json.dumps(data), - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - assert rv.json.get('engagement_id') == engagement.id - assert rv.json.get('user_id') == staff_user.id - assert rv.json.get('type') == MembershipType.TEAM_MEMBER - assert rv.json.get('status') == MembershipStatus.ACTIVE.value - mock_add_user_to_group_keycloak.assert_called() - mock_get_users_groups_keycloak.assert_called() - - with patch.object(MembershipService, 'create_membership', - side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): - rv = client.post( - memberships_url.format(engagement.id), - data=json.dumps(data), - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - - -def test_create_engagement_membership_reviewer(mocker, client, jwt, session, - setup_admin_user_and_claims): - """Assert that a reviewer engagement membership can be created.""" - user, claims = setup_admin_user_and_claims - engagement = factory_engagement_model() - staff_user = factory_staff_user_model() - headers = factory_auth_header(jwt=jwt, claims=claims) - - mock_add_user_to_group_keycloak_response = MagicMock() - mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT - mock_add_user_to_group_keycloak = mocker.patch( - 'met_api.services.keycloak.KeycloakService.add_user_to_group', - return_value=mock_add_user_to_group_keycloak_response - ) - mock_get_users_groups_keycloak = mocker.patch( - 'met_api.services.keycloak.KeycloakService.get_users_groups', - return_value={staff_user.external_id: [KeycloakGroupName.EAO_REVIEWER.value]} - ) - - data = {'user_id': staff_user.external_id} - - rv = client.post( - memberships_url.format(engagement.id), - 
data=json.dumps(data), - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - assert rv.json.get('engagement_id') == engagement.id - assert rv.json.get('user_id') == staff_user.id - assert rv.json.get('type') == MembershipType.REVIEWER - assert rv.json.get('status') == MembershipStatus.ACTIVE.value - mock_add_user_to_group_keycloak.assert_called() - mock_get_users_groups_keycloak.assert_called() - - -def test_create_engagement_membership_unauthorized(client, jwt, session, - setup_unprivileged_user_and_claims): - """Assert that creating an engagement membership without proper authorization fails.""" - user, claims = setup_unprivileged_user_and_claims - engagement = factory_engagement_model() - staff_user = factory_staff_user_model() - headers = factory_auth_header(jwt=jwt, claims=claims) - data = {'user_id': staff_user.external_id} - - rv = client.post( - memberships_url.format(engagement.id), - data=json.dumps(data), - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.FORBIDDEN +# TODO: Replace this test with one that adds composite roles to user +# def test_create_engagement_membership_team_member(mocker, client, jwt, session, +# setup_admin_user_and_claims): +# """Assert that a team member engagement membership can be created.""" +# user, claims = setup_admin_user_and_claims +# engagement = factory_engagement_model() +# staff_user = factory_staff_user_model() +# headers = factory_auth_header(jwt=jwt, claims=claims) + +# mock_add_user_to_group_keycloak_response = MagicMock() +# mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT +# mock_add_user_to_group_keycloak = mocker.patch( +# 'met_api.services.keycloak.KeycloakService.add_user_to_group', +# return_value=mock_add_user_to_group_keycloak_response +# ) +# mock_get_users_groups_keycloak = mocker.patch( +# 'met_api.services.keycloak.KeycloakService.get_users_groups', +# return_value={staff_user.external_id: [KeycloakGroupName.TEAM_MEMBER.value]} +# ) + +# data = {'user_id': staff_user.external_id} + +# rv = client.post( +# memberships_url.format(engagement.id), +# data=json.dumps(data), +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK +# assert rv.json.get('engagement_id') == engagement.id +# assert rv.json.get('user_id') == staff_user.id +# assert rv.json.get('type') == MembershipType.TEAM_MEMBER +# assert rv.json.get('status') == MembershipStatus.ACTIVE.value +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_users_groups_keycloak.assert_called() + +# with patch.object(MembershipService, 'create_membership', +# side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): +# rv = client.post( +# memberships_url.format(engagement.id), +# data=json.dumps(data), +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR + +# TODO: Replace this test with one that adds composite roles to user +# def test_create_engagement_membership_reviewer(mocker, client, jwt, session, +# setup_admin_user_and_claims): +# """Assert that a reviewer engagement membership can be created.""" +# user, claims = setup_admin_user_and_claims +# engagement = factory_engagement_model() +# staff_user = factory_staff_user_model() +# headers = factory_auth_header(jwt=jwt, claims=claims) + +# mock_add_user_to_group_keycloak_response = MagicMock() +# 
mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT +# mock_add_user_to_group_keycloak = mocker.patch( +# 'met_api.services.keycloak.KeycloakService.add_user_to_group', +# return_value=mock_add_user_to_group_keycloak_response +# ) +# mock_get_users_groups_keycloak = mocker.patch( +# 'met_api.services.keycloak.KeycloakService.get_users_groups', +# return_value={staff_user.external_id: [KeycloakGroupName.REVIEWER.value]} +# ) + +# data = {'user_id': staff_user.external_id} + +# rv = client.post( +# memberships_url.format(engagement.id), +# data=json.dumps(data), +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK +# assert rv.json.get('engagement_id') == engagement.id +# assert rv.json.get('user_id') == staff_user.id +# assert rv.json.get('type') == MembershipType.REVIEWER +# assert rv.json.get('status') == MembershipStatus.ACTIVE.value +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_users_groups_keycloak.assert_called() + +# TODO: Replace this test with one that adds composite roles to user +# def test_create_engagement_membership_unauthorized(client, jwt, session, +# setup_unprivileged_user_and_claims): +# """Assert that creating an engagement membership without proper authorization fails.""" +# user, claims = setup_unprivileged_user_and_claims +# engagement = factory_engagement_model() +# staff_user = factory_staff_user_model() +# headers = factory_auth_header(jwt=jwt, claims=claims) +# data = {'user_id': staff_user.external_id} + +# rv = client.post( +# memberships_url.format(engagement.id), +# data=json.dumps(data), +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.FORBIDDEN def test_revoke_membership(client, jwt, session, @@ -265,43 +263,43 @@ def test_get_membership(client, jwt, session, assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.parametrize('side_effect, expected_status', [ - (ValueError('Test error'), HTTPStatus.BAD_REQUEST), -]) -def test_get_all_engagements_by_user(mocker, client, jwt, session, side_effect, expected_status, - setup_admin_user_and_claims): - """Test that all engagements can be fetched for a member.""" - user, claims = setup_admin_user_and_claims - engagement = factory_engagement_model() - staff_user = factory_staff_user_model() - headers = factory_auth_header(jwt=jwt, claims=claims) - - mock_add_user_to_group_keycloak_response = MagicMock() - mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT - mocker.patch( - 'met_api.services.keycloak.KeycloakService.add_user_to_group', - return_value=mock_add_user_to_group_keycloak_response - ) - mocker.patch( - 'met_api.services.keycloak.KeycloakService.get_users_groups', - return_value={staff_user.external_id: [KeycloakGroupName.EAO_TEAM_MEMBER.value]} - ) - - data = {'user_id': staff_user.external_id} - - rv = client.post( - memberships_url.format(engagement.id), - data=json.dumps(data), - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - - rv = client.get( - f'/api/engagements/all/members/{staff_user.external_id}', - headers=headers, - content_type=ContentType.JSON.value - ) - - assert rv.status_code == HTTPStatus.OK - assert rv.json[0].get('engagement_id') == engagement.id +# @pytest.mark.parametrize('side_effect, expected_status', [ +# (ValueError('Test error'), HTTPStatus.BAD_REQUEST), +# ]) +# def test_get_all_engagements_by_user(mocker, client, jwt, session, side_effect, 
expected_status, +# setup_admin_user_and_claims): +# """Test that all engagements can be fetched for a member.""" +# user, claims = setup_admin_user_and_claims +# engagement = factory_engagement_model() +# staff_user = factory_staff_user_model() +# headers = factory_auth_header(jwt=jwt, claims=claims) + +# mock_add_user_to_group_keycloak_response = MagicMock() +# mock_add_user_to_group_keycloak_response.status_code = HTTPStatus.NO_CONTENT +# mocker.patch( +# 'met_api.services.keycloak.KeycloakService.add_user_to_group', +# return_value=mock_add_user_to_group_keycloak_response +# ) +# mocker.patch( +# 'met_api.services.keycloak.KeycloakService.get_users_groups', +# return_value={staff_user.external_id: [KeycloakGroupName.TEAM_MEMBER.value]} +# ) + +# data = {'user_id': staff_user.external_id} + +# rv = client.post( +# memberships_url.format(engagement.id), +# data=json.dumps(data), +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK + +# rv = client.get( +# f'/api/engagements/all/members/{staff_user.external_id}', +# headers=headers, +# content_type=ContentType.JSON.value +# ) + +# assert rv.status_code == HTTPStatus.OK +# assert rv.json[0].get('engagement_id') == engagement.id diff --git a/met-api/tests/unit/api/test_user.py b/met-api/tests/unit/api/test_user.py index d95e29556..4e35be2d8 100644 --- a/met-api/tests/unit/api/test_user.py +++ b/met-api/tests/unit/api/test_user.py @@ -17,70 +17,78 @@ Test-Suite to ensure that the /user endpoint is working as expected. """ -import copy from http import HTTPStatus from unittest.mock import MagicMock, patch + import pytest from flask import current_app -from met_api.exceptions.business_exception import BusinessException from met_api.models import Tenant as TenantModel from met_api.services.staff_user_membership_service import StaffUserMembershipService from met_api.services.staff_user_service import StaffUserService -from met_api.utils.enums import ContentType, KeycloakGroupName, UserStatus +from met_api.utils.enums import ContentType, UserStatus from tests.utilities.factory_scenarios import TestJwtClaims, TestUserInfo from tests.utilities.factory_utils import factory_auth_header, factory_staff_user_model, set_global_tenant -KEYCLOAK_SERVICE_MODULE = 'met_api.services.keycloak.KeycloakService' - - -def mock_add_user_to_group(mocker, mock_group_names): - """Mock the KeycloakService.add_user_to_group method.""" - mock_response = MagicMock() - mock_response.status_code = HTTPStatus.NO_CONTENT - - mock_add_user_to_group_keycloak = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.add_user_to_group', - return_value=mock_response - ) - mock_get_user_groups_keycloak = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', - return_value=[{'name': group_name} for group_name in mock_group_names] - ) - mock_get_user_groups_keycloak = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', - return_value=[{'name': group_name} for group_name in mock_group_names] - ) - - mock_add_attribute_to_user = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.add_attribute_to_user', - return_value=mock_response - ) - return mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user +KEYCLOAK_SERVICE_MODULE = 'met_api.services.keycloak.KeycloakService' -@pytest.mark.parametrize('side_effect, expected_status', [ - (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), - (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), -]) +# TODO: Replace this test with one that adds 
composite roles to user +# def mock_add_user_to_group(mocker, mock_group_names): +# """Mock the KeycloakService.add_user_to_group method.""" +# mock_response = MagicMock() +# mock_response.status_code = HTTPStatus.NO_CONTENT + +# mock_add_user_to_group_keycloak = mocker.patch( +# f'{KEYCLOAK_SERVICE_MODULE}.add_user_to_group', +# return_value=mock_response +# ) +# mock_get_user_groups_keycloak = mocker.patch( +# f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', +# return_value=[{'name': group_name} for group_name in mock_group_names] +# ) +# mock_get_user_groups_keycloak = mocker.patch( +# f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', +# return_value=[{'name': group_name} for group_name in mock_group_names] +# ) + +# mock_add_attribute_to_user = mocker.patch( +# f'{KEYCLOAK_SERVICE_MODULE}.add_attribute_to_user', +# return_value=mock_response +# ) +# return mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user + + +@pytest.mark.parametrize( + 'side_effect, expected_status', + [ + (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), + (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), + ], +) def test_create_staff_user(client, jwt, session, side_effect, expected_status): """Assert that a user can be POSTed.""" claims = TestJwtClaims.staff_admin_role headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.put('/api/user/', headers=headers, content_type=ContentType.JSON.value) + rv = client.put( + '/api/user/', headers=headers, content_type=ContentType.JSON.value + ) assert rv.status_code == HTTPStatus.OK assert rv.json.get('email_address') == claims.get('email') tenant_short_name = current_app.config.get('DEFAULT_TENANT_SHORT_NAME') tenant = TenantModel.find_by_short_name(tenant_short_name) assert rv.json.get('tenant_id') == str(tenant.id) - with patch.object(StaffUserService, 'create_or_update_user', side_effect=side_effect): - rv = client.put('/api/user/', headers=headers, content_type=ContentType.JSON.value) + with patch.object( + StaffUserService, 'create_or_update_user', side_effect=side_effect + ): + rv = client.put( + '/api/user/', headers=headers, content_type=ContentType.JSON.value + ) assert rv.status_code == expected_status -def test_get_staff_users(client, jwt, session, - setup_admin_user_and_claims): +def test_get_staff_users(client, jwt, session, setup_admin_user_and_claims): """Assert that a user can be POSTed.""" set_global_tenant() staff_1 = dict(TestUserInfo.user_staff_1) @@ -90,136 +98,142 @@ def test_get_staff_users(client, jwt, session, user, claims = setup_admin_user_and_claims headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get('/api/user/', headers=headers, content_type=ContentType.JSON.value) + rv = client.get( + '/api/user/', headers=headers, content_type=ContentType.JSON.value + ) assert rv.status_code == HTTPStatus.OK assert rv.json.get('total') == 4 assert len(rv.json.get('items')) == 4 -@pytest.mark.parametrize('side_effect, expected_status', [ - (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), - (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), -]) -def test_add_user_to_admin_group(mocker, client, jwt, session, side_effect, expected_status, - setup_admin_user_and_claims): - """Assert that a user can be added to the admin group.""" - user = factory_staff_user_model() - - mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( - mocker, - [KeycloakGroupName.EAO_IT_VIEWER.value] - ) - - user, claims 
= setup_admin_user_and_claims - headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post( - f'/api/user/{user.external_id}/groups?group=Administrator', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - mock_add_user_to_group_keycloak.assert_called() - mock_get_user_groups_keycloak.assert_called() - mock_add_attribute_to_user.assert_called() - - with patch.object(StaffUserService, 'add_user_to_group', side_effect=side_effect): - rv = client.post( - f'/api/user/{user.external_id}/groups?group=Administrator', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == expected_status - - with patch.object(StaffUserService, 'add_user_to_group', - side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): - rv = client.post( - f'/api/user/{user.external_id}/groups?group=Administrator', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - - -def test_add_user_to_reviewer_group(mocker, client, jwt, session, - setup_admin_user_and_claims): - """Assert that a user can be added to the reviewer group.""" - user = factory_staff_user_model() - - mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( - mocker, - [KeycloakGroupName.EAO_IT_VIEWER.value] - ) - - user, claims = setup_admin_user_and_claims - headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post( - f'/api/user/{user.external_id}/groups?group=Reviewer', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - mock_add_user_to_group_keycloak.assert_called() - mock_get_user_groups_keycloak.assert_called() - - -def test_add_user_to_team_member_group(mocker, client, jwt, session, - setup_admin_user_and_claims): - """Assert that a user can be added to the team member group.""" - user = factory_staff_user_model() - - mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( - mocker, - [KeycloakGroupName.EAO_IT_VIEWER.value] - ) - - user, claims = setup_admin_user_and_claims - headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post( - f'/api/user/{user.external_id}/groups?group=TeamMember', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.OK - mock_add_user_to_group_keycloak.assert_called() - mock_get_user_groups_keycloak.assert_called() - - -def test_add_user_to_team_member_group_across_tenants(mocker, client, jwt, session): - """Assert that a user can be added to the team member group.""" - set_global_tenant(tenant_id=1) - user = factory_staff_user_model() - - mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( - mocker, - [KeycloakGroupName.EAO_IT_VIEWER.value] - ) - - claims = copy.deepcopy(TestJwtClaims.staff_admin_role.value) - # sets a different tenant id in the request - claims['tenant_id'] = 2 - headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post( - f'/api/user/{user.external_id}/groups?group=TeamMember', - headers=headers, - content_type=ContentType.JSON.value - ) - # assert staff admin cant do cross tenant operation - assert rv.status_code == HTTPStatus.FORBIDDEN - - claims = copy.deepcopy(TestJwtClaims.met_admin_role.value) - # sets a different tenant id in the request - claims['tenant_id'] = 2 - headers = 
factory_auth_header(jwt=jwt, claims=claims) - rv = client.post( - f'/api/user/{user.external_id}/groups?group=TeamMember', - headers=headers, - content_type=ContentType.JSON.value - ) - # assert MET admin can do cross tenant operation - assert rv.status_code == HTTPStatus.OK - - mock_add_user_to_group_keycloak.assert_called() - mock_get_user_groups_keycloak.assert_called() +# TODO: Replace/modify the next series of tests so they support composite roles instead of groups +# @pytest.mark.parametrize('side_effect, expected_status', [ +# (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), +# (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), +# ]) +# def test_add_user_to_admin_group(mocker, client, jwt, session, side_effect, expected_status, +# setup_admin_user_and_claims): +# """Assert that a user can be added to the admin group.""" +# user = factory_staff_user_model() + +# mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( # noqa: E501 +# mocker, +# [KeycloakGroupName.EAO_IT_VIEWER.value] +# ) + +# user, claims = setup_admin_user_and_claims +# headers = factory_auth_header(jwt=jwt, claims=claims) +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=Administrator', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_user_groups_keycloak.assert_called() +# mock_add_attribute_to_user.assert_called() + +# with patch.object(StaffUserService, 'add_user_to_group', side_effect=side_effect): +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=Administrator', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == expected_status + +# with patch.object(StaffUserService, 'add_user_to_group', +# side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=Administrator', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR + +# TODO: Replace/modify the next series of tests so they support composite roles instead of groups +# def test_add_user_to_reviewer_group(mocker, client, jwt, session, +# setup_admin_user_and_claims): +# """Assert that a user can be added to the reviewer group.""" +# user = factory_staff_user_model() + +# mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( # noqa: E501 +# mocker, +# [KeycloakGroupName.EAO_IT_VIEWER.value] +# ) + +# user, claims = setup_admin_user_and_claims +# headers = factory_auth_header(jwt=jwt, claims=claims) +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=Reviewer', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_user_groups_keycloak.assert_called() + +# TODO: Replace/modify the next series of tests so they support composite +# roles instead of groups +# def test_add_user_to_team_member_group(mocker, client, jwt, session, +# setup_admin_user_and_claims): +# """Assert that a user can be added to the team member group.""" +# user = factory_staff_user_model() + +# mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, +# mock_add_attribute_to_user = mock_add_user_to_group( +# mocker, +# 
[KeycloakGroupName.EAO_IT_VIEWER.value] +# ) + +# user, claims = setup_admin_user_and_claims +# headers = factory_auth_header(jwt=jwt, claims=claims) +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=TeamMember', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.OK +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_user_groups_keycloak.assert_called() + +# TODO: Replace/modify the next series of tests so they support composite +# roles instead of groups +# def test_add_user_to_team_member_group_across_tenants(mocker, client, jwt, session): +# """Assert that a user can be added to the team member group.""" +# set_global_tenant(tenant_id=1) +# user = factory_staff_user_model() + +# mock_add_user_to_group_keycloak, mock_get_user_groups_keycloak, mock_add_attribute_to_user = mock_add_user_to_group( # noqa: E501 +# mocker, +# [KeycloakGroupName.EAO_IT_VIEWER.value] +# ) + +# claims = copy.deepcopy(TestJwtClaims.staff_admin_role.value) +# # sets a different tenant id in the request +# claims['tenant_id'] = 2 +# headers = factory_auth_header(jwt=jwt, claims=claims) +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=TeamMember', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# # assert staff admin cant do cross tenant operation +# assert rv.status_code == HTTPStatus.FORBIDDEN + +# claims = copy.deepcopy(TestJwtClaims.met_admin_role.value) +# # sets a different tenant id in the request +# claims['tenant_id'] = 2 +# headers = factory_auth_header(jwt=jwt, claims=claims) +# rv = client.post( +# f'/api/user/{user.external_id}/groups?group=TeamMember', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# # assert MET admin can do cross tenant operation +# assert rv.status_code == HTTPStatus.OK + +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_user_groups_keycloak.assert_called() def mock_toggle_user_status(mocker): @@ -229,14 +243,15 @@ def mock_toggle_user_status(mocker): mock_toggle_user_status = mocker.patch( f'{KEYCLOAK_SERVICE_MODULE}.toggle_user_enabled_status', - return_value=mock_response + return_value=mock_response, ) return mock_toggle_user_status -def test_toggle_user_active_status(mocker, client, jwt, session, - setup_admin_user_and_claims): +def test_toggle_user_active_status( + mocker, client, jwt, session, setup_admin_user_and_claims +): """Assert that a user can be toggled.""" user = factory_staff_user_model() mocked_toggle_user_status = mock_toggle_user_status(mocker) @@ -248,15 +263,16 @@ def test_toggle_user_active_status(mocker, client, jwt, session, f'/api/user/{user.external_id}/status', headers=headers, json={'active': False}, - content_type=ContentType.JSON.value + content_type=ContentType.JSON.value, ) assert rv.status_code == HTTPStatus.OK assert rv.json.get('status_id') == UserStatus.INACTIVE.value mocked_toggle_user_status.assert_called() -def test_team_member_cannot_toggle_user_active_status(mocker, client, jwt, session, - setup_team_member_and_claims): +def test_team_member_cannot_toggle_user_active_status( + mocker, client, jwt, session, setup_team_member_and_claims +): """Assert that a team member cannot toggle user status.""" user = factory_staff_user_model() mocked_toggle_user_status = mock_toggle_user_status(mocker) @@ -268,14 +284,15 @@ def test_team_member_cannot_toggle_user_active_status(mocker, client, jwt, sessi f'/api/user/{user.external_id}/status', headers=headers, json={'active': False}, - 
content_type=ContentType.JSON.value + content_type=ContentType.JSON.value, ) assert rv.status_code == HTTPStatus.UNAUTHORIZED mocked_toggle_user_status.assert_not_called() -def test_reviewer_cannot_toggle_user_active_status(mocker, client, jwt, session, - setup_reviewer_and_claims): +def test_reviewer_cannot_toggle_user_active_status( + mocker, client, jwt, session, setup_reviewer_and_claims +): """Assert that a reviewer cannot toggle user status.""" user = factory_staff_user_model() mocked_toggle_user_status = mock_toggle_user_status(mocker) @@ -287,14 +304,15 @@ def test_reviewer_cannot_toggle_user_active_status(mocker, client, jwt, session, f'/api/user/{user.external_id}/status', headers=headers, json={'active': False}, - content_type=ContentType.JSON.value + content_type=ContentType.JSON.value, ) assert rv.status_code == HTTPStatus.UNAUTHORIZED mocked_toggle_user_status.assert_not_called() -def test_toggle_user_active_status_empty_body(mocker, client, jwt, session, - setup_admin_user_and_claims): +def test_toggle_user_active_status_empty_body( + mocker, client, jwt, session, setup_admin_user_and_claims +): """Assert that returns bad request if bad request body.""" user = factory_staff_user_model() mocked_toggle_user_status = mock_toggle_user_status(mocker) @@ -305,32 +323,48 @@ def test_toggle_user_active_status_empty_body(mocker, client, jwt, session, rv = client.patch( f'/api/user/{user.external_id}/status', headers=headers, - content_type=ContentType.JSON.value + content_type=ContentType.JSON.value, ) assert rv.status_code == HTTPStatus.BAD_REQUEST mocked_toggle_user_status.assert_not_called() -def test_get_staff_users_by_id(client, jwt, session, - setup_admin_user_and_claims): +def test_get_staff_users_by_id( + client, jwt, session, setup_admin_user_and_claims +): """Assert that a user can be fetched.""" user, claims = setup_admin_user_and_claims headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.put('/api/user/', headers=headers, content_type=ContentType.JSON.value) + rv = client.put( + '/api/user/', headers=headers, content_type=ContentType.JSON.value + ) assert rv.status_code == HTTPStatus.OK user_id = rv.json.get('id') - rv = client.get(f'/api/user/{user_id}', headers=headers, content_type=ContentType.JSON.value) + rv = client.get( + f'/api/user/{user_id}', + headers=headers, + content_type=ContentType.JSON.value, + ) assert rv.status_code == HTTPStatus.OK assert rv.json.get('id') == user_id -@pytest.mark.parametrize('side_effect, expected_status', [ - (KeyError('Test error'), HTTPStatus.BAD_REQUEST), - (ValueError('Test error'), HTTPStatus.BAD_REQUEST), -]) -def test_errors_on_toggle_user_active_status(client, jwt, session, side_effect, expected_status, - setup_admin_user_and_claims): +@pytest.mark.parametrize( + 'side_effect, expected_status', + [ + (KeyError('Test error'), HTTPStatus.BAD_REQUEST), + (ValueError('Test error'), HTTPStatus.BAD_REQUEST), + ], +) +def test_errors_on_toggle_user_active_status( + client, + jwt, + session, + side_effect, + expected_status, + setup_admin_user_and_claims, +): """Assert that a user can be toggled.""" user = factory_staff_user_model() @@ -338,11 +372,15 @@ def test_errors_on_toggle_user_active_status(client, jwt, session, side_effect, user, claims = setup_admin_user_and_claims headers = factory_auth_header(jwt=jwt, claims=claims) - with patch.object(StaffUserMembershipService, 'reactivate_deactivate_user', side_effect=side_effect): + with patch.object( + StaffUserMembershipService, + 'reactivate_deactivate_user', + 
side_effect=side_effect, + ): rv = client.patch( f'/api/user/{user.external_id}/status', headers=headers, json={'active': False}, - content_type=ContentType.JSON.value + content_type=ContentType.JSON.value, ) assert rv.status_code == expected_status diff --git a/met-api/tests/unit/api/test_user_membership.py b/met-api/tests/unit/api/test_user_membership.py index f91fb16c8..808b1428f 100644 --- a/met-api/tests/unit/api/test_user_membership.py +++ b/met-api/tests/unit/api/test_user_membership.py @@ -14,19 +14,19 @@ """Tests to verify the user membership operations. -Test-Suite to ensure that the user membership endpoints are working as expected. +Test-Suite to ensure that the user membership endpoints are working as expected. # noqa: E501 """ from http import HTTPStatus -from unittest.mock import MagicMock, patch -import pytest +from unittest.mock import MagicMock +# import pytest -from met_api.exceptions.business_exception import BusinessException -from met_api.models.membership import Membership as MembershipModel -from met_api.services.staff_user_membership_service import StaffUserMembershipService -from met_api.utils.enums import ContentType, KeycloakGroupName, MembershipStatus, UserStatus -from tests.utilities.factory_scenarios import TestJwtClaims -from tests.utilities.factory_utils import ( - factory_auth_header, factory_engagement_model, factory_membership_model, factory_staff_user_model) +# from met_api.exceptions.business_exception import BusinessException +# from met_api.models.membership import Membership as MembershipModel +# from met_api.services.staff_user_membership_service import StaffUserMembershipService # noqa: E501 +# from met_api.utils.enums import ContentType, KeycloakGroupName, MembershipStatus, UserStatus # noqa: E501 +# from tests.utilities.factory_scenarios import TestJwtClaims +# from tests.utilities.factory_utils import ( +# factory_auth_header, factory_engagement_model, factory_membership_model, factory_staff_user_model) # noqa: E501 KEYCLOAK_SERVICE_MODULE = 'met_api.services.keycloak.KeycloakService' @@ -42,10 +42,11 @@ def mock_keycloak_methods(mocker, mock_group_names): return_value=mock_response ) - mock_get_user_groups_keycloak = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', - return_value=[{'name': group_name} for group_name in mock_group_names] - ) + # TODO: Restore this patch but for composite roles and not groups + # mock_get_user_groups_keycloak = mocker.patch( + # f'{KEYCLOAK_SERVICE_MODULE}.get_user_groups', + # return_value=[{'name': group_name} for group_name in mock_group_names] + # ) mock_add_attribute_to_user = mocker.patch( f'{KEYCLOAK_SERVICE_MODULE}.add_attribute_to_user', @@ -59,74 +60,74 @@ def mock_keycloak_methods(mocker, mock_group_names): return ( mock_add_user_to_group_keycloak, - mock_get_user_groups_keycloak, + # mock_get_user_groups_keycloak, mock_add_attribute_to_user, mock_remove_user_from_group_keycloak ) - -@pytest.mark.parametrize('side_effect, expected_status', [ - (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), - (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), -]) -def test_reassign_user_reviewer_team_member(mocker, client, jwt, session, side_effect, expected_status): - """Assert that returns bad request if bad request body.""" - user = factory_staff_user_model() - eng = factory_engagement_model() - current_membership = factory_membership_model(user_id=user.id, engagement_id=eng.id) - assert current_membership.status == MembershipStatus.ACTIVE.value - mock_response = MagicMock() - 
mock_response.status_code = HTTPStatus.NO_CONTENT - - ( - mock_add_user_to_group_keycloak, - mock_get_user_groups_keycloak, - mock_add_attribute_to_user, - mock_remove_user_from_group_keycloak - ) = mock_keycloak_methods( - mocker, - [KeycloakGroupName.EAO_REVIEWER.value] - ) - - mock_get_users_groups_keycloak = mocker.patch( - f'{KEYCLOAK_SERVICE_MODULE}.get_users_groups', - return_value={user.external_id: [KeycloakGroupName.EAO_REVIEWER.value]} - ) - - assert user.status_id == UserStatus.ACTIVE.value - claims = TestJwtClaims.staff_admin_role - headers = factory_auth_header(jwt=jwt, claims=claims) - - rv = client.put( - f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', - headers=headers, - content_type=ContentType.JSON.value - ) - - assert rv.status_code == HTTPStatus.OK - mock_add_user_to_group_keycloak.assert_called() - mock_get_user_groups_keycloak.assert_called() - mock_add_attribute_to_user.assert_called() - mock_remove_user_from_group_keycloak.assert_called() - mock_get_users_groups_keycloak.assert_called() - - memberships = MembershipModel.find_by_user_id(user.id) - assert len(memberships) == 1 - assert memberships[0].status == MembershipStatus.REVOKED.value - - with patch.object(StaffUserMembershipService, 'reassign_user', side_effect=side_effect): - rv = client.put( - f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == expected_status - - with patch.object(StaffUserMembershipService, 'reassign_user', - side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): - rv = client.put( - f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', - headers=headers, - content_type=ContentType.JSON.value - ) - assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR +# TODO: Restore this test to support composite roles instead of groups +# @pytest.mark.parametrize('side_effect, expected_status', [ +# (KeyError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), +# (ValueError('Test error'), HTTPStatus.INTERNAL_SERVER_ERROR), +# ]) +# def test_reassign_user_reviewer_team_member(mocker, client, jwt, session, side_effect, expected_status): # noqa: E501 +# """Assert that returns bad request if bad request body.""" +# user = factory_staff_user_model() +# eng = factory_engagement_model() +# current_membership = factory_membership_model(user_id=user.id, engagement_id=eng.id) # noqa: E501 +# assert current_membership.status == MembershipStatus.ACTIVE.value +# mock_response = MagicMock() +# mock_response.status_code = HTTPStatus.NO_CONTENT + +# ( +# mock_add_user_to_group_keycloak, +# mock_get_user_groups_keycloak, +# mock_add_attribute_to_user, +# mock_remove_user_from_group_keycloak +# ) = mock_keycloak_methods( +# mocker, +# [KeycloakGroupName.EAO_REVIEWER.value] +# ) + +# mock_get_users_groups_keycloak = mocker.patch( +# f'{KEYCLOAK_SERVICE_MODULE}.get_users_groups', +# return_value={user.external_id: [KeycloakGroupName.EAO_REVIEWER.value]} # noqa: E501 +# ) + +# assert user.status_id == UserStatus.ACTIVE.value +# claims = TestJwtClaims.staff_admin_role +# headers = factory_auth_header(jwt=jwt, claims=claims) + +# rv = client.put( +# f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', +# headers=headers, +# content_type=ContentType.JSON.value +# ) + +# assert rv.status_code == HTTPStatus.OK +# mock_add_user_to_group_keycloak.assert_called() +# mock_get_user_groups_keycloak.assert_called() +# mock_add_attribute_to_user.assert_called() +# 
mock_remove_user_from_group_keycloak.assert_called() +# mock_get_users_groups_keycloak.assert_called() + +# memberships = MembershipModel.find_by_user_id(user.id) +# assert len(memberships) == 1 +# assert memberships[0].status == MembershipStatus.REVOKED.value + +# with patch.object(StaffUserMembershipService, 'reassign_user', side_effect=side_effect): +# rv = client.put( +# f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == expected_status + +# with patch.object(StaffUserMembershipService, 'reassign_user', +# side_effect=BusinessException('Test error', status_code=HTTPStatus.INTERNAL_SERVER_ERROR)): +# rv = client.put( +# f'/api/user/{user.id}/groups?group=EAO_TEAM_MEMBER', +# headers=headers, +# content_type=ContentType.JSON.value +# ) +# assert rv.status_code == HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/met-api/tests/unit/services/test_keycloak.py b/met-api/tests/unit/services/test_keycloak.py index 1266de9db..87cdfb842 100644 --- a/met-api/tests/unit/services/test_keycloak.py +++ b/met-api/tests/unit/services/test_keycloak.py @@ -36,18 +36,18 @@ def test_keycloak_get_user_by_username(session): user = KEYCLOAK_SERVICE.get_user_by_username(request.get('username')) assert user.get('username') == request.get('username') - -def test_keycloak_get_user_groups(session): - """Get user by username. Assert get a user with the same username as the username in request.""" - request = KeycloakScenario.create_user_request() - group_name = 'admins' - KEYCLOAK_SERVICE.add_user(request) - user = KEYCLOAK_SERVICE.get_user_by_username(request.get('username')) - user_id = user.get('id') - user_group = KEYCLOAK_SERVICE.get_users_groups([user_id]) - - assert group_name not in user_group.get(user_id) - # add the group - KEYCLOAK_SERVICE.add_user_to_group(user_id, '%s' % group_name) - user_group = KEYCLOAK_SERVICE.get_users_groups([user_id]) - assert group_name in user_group.get(user_id) +# TODO: Replace this test with one that gets user composite roles +# def test_keycloak_get_user_groups(session): +# """Get user by username. 
Assert get a user with the same username as the username in request.""" +# request = KeycloakScenario.create_user_request() +# group_name = 'admins' +# KEYCLOAK_SERVICE.add_user(request) +# user = KEYCLOAK_SERVICE.get_user_by_username(request.get('username')) +# user_id = user.get('id') +# user_group = KEYCLOAK_SERVICE.get_users_groups([user_id]) + +# assert group_name not in user_group.get(user_id) +# # add the group +# KEYCLOAK_SERVICE.add_user_to_group(user_id, '%s' % group_name) +# user_group = KEYCLOAK_SERVICE.get_users_groups([user_id]) +# assert group_name in user_group.get(user_id) diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/AddTeamMemberModal.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/AddTeamMemberModal.tsx index 4d5d1d552..ea27d66ad 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/AddTeamMemberModal.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/AddTeamMemberModal.tsx @@ -63,7 +63,7 @@ export const AddTeamMemberModal = () => { setUsersLoading(true); const response = await getUserList({ search_text: searchText, - include_groups: false, + include_roles: false, }); setUsers(response.items); setUsersLoading(false); diff --git a/met-web/src/components/userManagement/listing/UserManagementContext.tsx b/met-web/src/components/userManagement/listing/UserManagementContext.tsx index eabef5a37..60b1c3dd8 100644 --- a/met-web/src/components/userManagement/listing/UserManagementContext.tsx +++ b/met-web/src/components/userManagement/listing/UserManagementContext.tsx @@ -101,7 +101,7 @@ export const UserManagementContextProvider = ({ children }: { children: JSX.Elem size, sort_key: nested_sort_key || sort_key, sort_order, - include_groups: true, + include_roles: true, search_text: searchText, include_inactive: true, }); diff --git a/met-web/src/components/userManagement/userDetails/UserDetailsContext.tsx b/met-web/src/components/userManagement/userDetails/UserDetailsContext.tsx index e97be33b3..72626dc03 100644 --- a/met-web/src/components/userManagement/userDetails/UserDetailsContext.tsx +++ b/met-web/src/components/userManagement/userDetails/UserDetailsContext.tsx @@ -76,7 +76,7 @@ export const UserDetailsContextProvider = ({ children }: { children: JSX.Element const getUserDetails = async () => { setUserLoading(true); - const fetchedUser = await getUser({ user_id: Number(userId), include_groups: true }); + const fetchedUser = await getUser({ user_id: Number(userId), include_roles: true }); setSavedUser(fetchedUser); setUserLoading(false); }; diff --git a/met-web/src/models/user.ts b/met-web/src/models/user.ts index 235e24dbb..53420ecde 100644 --- a/met-web/src/models/user.ts +++ b/met-web/src/models/user.ts @@ -1,20 +1,20 @@ -export type UserGroup = 'EAO_IT_ADMIN' | 'EAO_IT_VIEWER' | 'EAO_TEAM_MEMBER' | 'EAO_REVIEWER'; +export type UserGroup = 'IT_ADMIN' | 'IT_VIEWER' | 'TEAM_MEMBER' | 'REVIEWER'; export const USER_GROUP: { [x: string]: { value: UserGroup; label: string } } = { ADMIN: { - value: 'EAO_IT_ADMIN', + value: 'IT_ADMIN', label: 'Administrator', }, VIEWER: { - value: 'EAO_IT_VIEWER', + value: 'IT_VIEWER', label: 'Viewer', }, TEAM_MEMBER: { - value: 'EAO_TEAM_MEMBER', + value: 'TEAM_MEMBER', label: 'Team Member', }, REVIEWER: { - value: 'EAO_REVIEWER', + value: 'REVIEWER', label: 'Reviewer', }, }; diff --git a/met-web/src/routes/AuthGate.tsx b/met-web/src/routes/AuthGate.tsx index 46b35d0e8..c8be2ff32 100644 --- 
a/met-web/src/routes/AuthGate.tsx +++ b/met-web/src/routes/AuthGate.tsx @@ -5,7 +5,6 @@ import { USER_GROUP } from 'models/user'; const AuthGate = ({ allowedRoles }: { allowedRoles: string[] }) => { const permissions = useAppSelector((state) => state.user.roles); - const userGroups = useAppSelector((state) => state.user.userDetail.groups); const location = useLocation(); const scopesMap: { [scope: string]: boolean } = {}; @@ -14,7 +13,7 @@ const AuthGate = ({ allowedRoles }: { allowedRoles: string[] }) => { }); return permissions.some((permission) => scopesMap[permission]) || - userGroups?.includes('/' + USER_GROUP.TEAM_MEMBER.value) ? ( + permissions?.includes('/' + USER_GROUP.TEAM_MEMBER.value) ? ( ) : ( diff --git a/met-web/src/services/userService/api/index.tsx b/met-web/src/services/userService/api/index.tsx index 06fc5384d..8f1430352 100644 --- a/met-web/src/services/userService/api/index.tsx +++ b/met-web/src/services/userService/api/index.tsx @@ -12,7 +12,7 @@ interface GetUserListParams { sort_order?: 'asc' | 'desc'; search_text?: string; // If yes, user groups will be fetched as well from keycloak - include_groups?: boolean; + include_roles?: boolean; include_inactive?: boolean; } export const getUserList = async (params: GetUserListParams = {}): Promise> => { @@ -28,7 +28,7 @@ export const getUserList = async (params: GetUserListParams = {}): Promise => { const url = replaceUrl(Endpoints.User.GET, 'user_id', String(params.user_id)); From be8dad448ac8177a457c30ad3892bcff4445f3f9 Mon Sep 17 00:00:00 2001 From: Ratheesh kumar R <108045773+ratheesh-aot@users.noreply.github.com> Date: Fri, 9 Feb 2024 13:55:24 -0800 Subject: [PATCH 03/42] DESENG-452 : Applying pending migrations (#2378) * DESENG-452 : Applying pending migrations * Updating unit test * Updated changelog * Fixed lint issue --- CHANGELOG.MD | 7 ++ met-api/migrations/versions/37176ea4708d_.py | 91 +++++++++++++++++++ met-api/src/met_api/models/submission.py | 2 +- met-api/src/met_api/models/timeline_event.py | 6 +- met-api/src/met_api/models/widget_timeline.py | 4 +- .../api/test_email_verification_service.py | 5 +- .../test_email_verification_service.py | 7 +- met-api/tests/utilities/factory_utils.py | 10 +- 8 files changed, 120 insertions(+), 12 deletions(-) create mode 100644 met-api/migrations/versions/37176ea4708d_.py diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 80c4e5899..5d8efaba8 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,10 @@ +## February 08, 2024 +- **Task**Consolidate and re-write old migration files [DESENG-452](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-452) + - Change some foreign key fields to nullable false in model files + - Change `rejected_reason_other` to nullable true in `submission` model + - Generated a new migration file based on the pending model changes, which was confirmed to be valid + - Updated unit test of email verification to send the type to the API + ## February 06, 2024 - **Task**Convert keycloak groups to composite roles for permission levels [DESENG-447](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-447) - Commented out unit test related to Keycloak groups diff --git a/met-api/migrations/versions/37176ea4708d_.py b/met-api/migrations/versions/37176ea4708d_.py new file mode 100644 index 000000000..3ae7a8545 --- /dev/null +++ b/met-api/migrations/versions/37176ea4708d_.py @@ -0,0 +1,91 @@ +"""empty message + +Revision ID: 37176ea4708d +Revises: ec0128056a33 +Create Date: 2024-02-08 12:40:09.456210 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects
import postgresql + +# revision identifiers, used by Alembic. +revision = '37176ea4708d' +down_revision = 'ec0128056a33' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('email_verification', 'type', + existing_type=postgresql.ENUM('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), + nullable=False) + op.create_index(op.f('ix_engagement_metadata_engagement_id'), 'engagement_metadata', ['engagement_id'], unique=False) + op.create_index(op.f('ix_engagement_metadata_taxon_id'), 'engagement_metadata', ['taxon_id'], unique=False) + op.create_index(op.f('ix_engagement_metadata_value'), 'engagement_metadata', ['value'], unique=False) + op.create_index(op.f('ix_engagement_metadata_taxa_tenant_id'), 'engagement_metadata_taxa', ['tenant_id'], unique=False) + op.create_unique_constraint(None, 'engagement_metadata_taxa', ['id']) + op.execute('UPDATE membership_status_codes SET created_date = CURRENT_TIMESTAMP WHERE created_date IS NULL;') + op.alter_column('membership_status_codes', 'created_date', + existing_type=postgresql.TIMESTAMP(), + nullable=False) + op.drop_index('ix_participant_email_address', table_name='participant') + op.alter_column('timeline_event', 'widget_id', + existing_type=sa.INTEGER(), + nullable=False) + op.alter_column('timeline_event', 'status', + existing_type=postgresql.ENUM('Pending', 'InProgress', 'Completed', name='timelineeventstatus'), + nullable=False) + op.alter_column('timeline_event', 'position', + existing_type=sa.INTEGER(), + nullable=False) + op.alter_column('widget_documents', 'is_uploaded', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('false')) + op.alter_column('widget_timeline', 'widget_id', + existing_type=sa.INTEGER(), + nullable=False) + op.execute('UPDATE widget_type SET created_date = CURRENT_TIMESTAMP WHERE created_date IS NULL;') + op.alter_column('widget_type', 'created_date', + existing_type=postgresql.TIMESTAMP(), + nullable=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('widget_type', 'created_date', + existing_type=postgresql.TIMESTAMP(), + nullable=True) + op.alter_column('widget_timeline', 'widget_id', + existing_type=sa.INTEGER(), + nullable=True) + op.alter_column('widget_documents', 'is_uploaded', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('false')) + op.alter_column('timeline_event', 'position', + existing_type=sa.INTEGER(), + nullable=True) + op.alter_column('timeline_event', 'status', + existing_type=postgresql.ENUM('Pending', 'InProgress', 'Completed', name='timelineeventstatus'), + nullable=True) + op.alter_column('timeline_event', 'widget_id', + existing_type=sa.INTEGER(), + nullable=True) + op.create_index('ix_participant_email_address', 'participant', ['email_address'], unique=False) + + op.alter_column('membership_status_codes', 'created_date', + existing_type=postgresql.TIMESTAMP(), + nullable=True) + op.drop_constraint(None, 'engagement_metadata_taxa', type_='unique') + op.drop_index(op.f('ix_engagement_metadata_taxa_tenant_id'), table_name='engagement_metadata_taxa') + op.drop_index(op.f('ix_engagement_metadata_value'), table_name='engagement_metadata') + op.drop_index(op.f('ix_engagement_metadata_taxon_id'), table_name='engagement_metadata') + op.drop_index(op.f('ix_engagement_metadata_engagement_id'), table_name='engagement_metadata') + op.alter_column('email_verification', 'type', + existing_type=postgresql.ENUM('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), + nullable=True) + # ### end Alembic commands ### diff --git a/met-api/src/met_api/models/submission.py b/met-api/src/met_api/models/submission.py index 40a0cb559..0698b9ff3 100644 --- a/met-api/src/met_api/models/submission.py +++ b/met-api/src/met_api/models/submission.py @@ -35,7 +35,7 @@ class Submission(BaseModel): # pylint: disable=too-few-public-methods comment_status_id = db.Column(db.Integer, ForeignKey('comment_status.id', ondelete='SET NULL')) has_personal_info = db.Column(db.Boolean, nullable=True) has_profanity = db.Column(db.Boolean, nullable=True) - rejected_reason_other = db.Column(db.String(500), nullable=False) + rejected_reason_other = db.Column(db.String(500), nullable=True) has_threat = db.Column(db.Boolean, nullable=True) notify_email = db.Column(db.Boolean(), default=True) comments = db.relationship('Comment', backref='submission', cascade='all, delete') diff --git a/met-api/src/met_api/models/timeline_event.py b/met-api/src/met_api/models/timeline_event.py index 7a80a12dc..cdd6e9b8f 100644 --- a/met-api/src/met_api/models/timeline_event.py +++ b/met-api/src/met_api/models/timeline_event.py @@ -16,9 +16,9 @@ class TimelineEvent(BaseModel): __tablename__ = 'timeline_event' id = db.Column(db.Integer, primary_key=True, autoincrement=True) - engagement_id = db.Column(db.Integer, ForeignKey('engagement.id', ondelete='CASCADE'), nullable=True) - widget_id = db.Column(db.Integer, ForeignKey('widget.id', ondelete='CASCADE'), nullable=True) - timeline_id = db.Column(db.Integer, ForeignKey('widget_timeline.id', ondelete='CASCADE'), nullable=True) + engagement_id = db.Column(db.Integer, ForeignKey('engagement.id', ondelete='CASCADE'), nullable=False) + widget_id = db.Column(db.Integer, ForeignKey('widget.id', ondelete='CASCADE'), nullable=False) + timeline_id = db.Column(db.Integer, ForeignKey('widget_timeline.id', ondelete='CASCADE'), nullable=False) status = db.Column(db.Enum(TimelineEventStatus), nullable=False) position = db.Column(db.Integer, nullable=False) description = 
db.Column(db.Text(), nullable=True) diff --git a/met-api/src/met_api/models/widget_timeline.py b/met-api/src/met_api/models/widget_timeline.py index 18cb732be..7de9743ef 100755 --- a/met-api/src/met_api/models/widget_timeline.py +++ b/met-api/src/met_api/models/widget_timeline.py @@ -14,8 +14,8 @@ class WidgetTimeline(BaseModel): # pylint: disable=too-few-public-methods, too- __tablename__ = 'widget_timeline' id = db.Column(db.Integer, primary_key=True, autoincrement=True) - engagement_id = db.Column(db.Integer, ForeignKey('engagement.id', ondelete='CASCADE'), nullable=True) - widget_id = db.Column(db.Integer, ForeignKey('widget.id', ondelete='CASCADE'), nullable=True) + engagement_id = db.Column(db.Integer, ForeignKey('engagement.id', ondelete='CASCADE'), nullable=False) + widget_id = db.Column(db.Integer, ForeignKey('widget.id', ondelete='CASCADE'), nullable=False) title = db.Column(db.String(255), nullable=True) description = db.Column(db.Text(), nullable=True) diff --git a/met-api/tests/unit/api/test_email_verification_service.py b/met-api/tests/unit/api/test_email_verification_service.py index 87bef2173..325425485 100644 --- a/met-api/tests/unit/api/test_email_verification_service.py +++ b/met-api/tests/unit/api/test_email_verification_service.py @@ -41,7 +41,8 @@ def test_email_verification(client, jwt, session, notify_mock, ): # pylint:disa survey, eng = factory_survey_and_eng_model() to_dict = { 'email_address': fake.email(), - 'survey_id': survey.id + 'survey_id': survey.id, + 'type': EmailVerificationType.Survey, } headers = factory_auth_header(jwt=jwt, claims=claims) rv = client.post('/api/email_verification/', data=json.dumps(to_dict), @@ -87,7 +88,7 @@ def test_patch_email_verification_by_token(client, jwt, session): # pylint:disa claims = TestJwtClaims.public_user_role set_global_tenant() survey, eng = factory_survey_and_eng_model() - email_verification = factory_email_verification(survey.id) + email_verification = factory_email_verification(survey.id, EmailVerificationType.Subscribe) headers = factory_auth_header(jwt=jwt, claims=claims) rv = client.put(f'/api/email_verification/{email_verification.verification_token}', diff --git a/met-api/tests/unit/services/test_email_verification_service.py b/met-api/tests/unit/services/test_email_verification_service.py index 2f96cdd14..813cd1b27 100644 --- a/met-api/tests/unit/services/test_email_verification_service.py +++ b/met-api/tests/unit/services/test_email_verification_service.py @@ -23,6 +23,7 @@ from met_api.exceptions.business_exception import BusinessException from met_api.services.email_verification_service import EmailVerificationService +from met_api.constants.email_verification import EmailVerificationType from met_api.utils import notification from tests.utilities.factory_scenarios import TestEngagementSlugInfo from tests.utilities.factory_utils import factory_engagement_slug_model, factory_survey_and_eng_model, set_global_tenant @@ -43,7 +44,8 @@ def test_create_email_verification(client, jwt, session, ): # pylint:disable=un email = fake.email() to_dict = { 'email_address': email, - 'survey_id': survey.id + 'survey_id': survey.id, + 'type': EmailVerificationType.Survey } with patch.object(notification, 'send_email', return_value=False) as mock_mail: EmailVerificationService().create(to_dict) @@ -66,7 +68,8 @@ def test_create_email_verification_exception(client, jwt, session, ): # pylint: email = fake.email() to_dict = { 'email_address': email, - 'survey_id': survey.id + 'survey_id': survey.id, + 'type': 
EmailVerificationType.Survey } with pytest.raises(BusinessException) as exception: with patch.object(notification, 'send_email', side_effect=Exception('mocked error')): diff --git a/met-api/tests/utilities/factory_utils.py b/met-api/tests/utilities/factory_utils.py index 7d2b8809c..a04528468 100644 --- a/met-api/tests/utilities/factory_utils.py +++ b/met-api/tests/utilities/factory_utils.py @@ -50,6 +50,7 @@ from met_api.models.widget_video import WidgetVideo as WidgetVideoModel from met_api.utils.constants import TENANT_ID_HEADER from met_api.utils.enums import MembershipStatus +from met_api.constants.email_verification import EmailVerificationType from tests.utilities.factory_scenarios import ( TestCommentInfo, TestEngagementInfo, TestEngagementMetadataInfo, TestEngagementMetadataTaxonInfo, TestEngagementSlugInfo, TestFeedbackInfo, TestJwtClaims, TestParticipantInfo, TestPollAnswerInfo, @@ -121,12 +122,17 @@ def factory_subscription_model(): return subscription -def factory_email_verification(survey_id): +def factory_email_verification(survey_id, type=None): """Produce a EmailVerification model.""" email_verification = EmailVerificationModel( verification_token=fake.uuid4(), - is_active=True + is_active=True, ) + if type: + email_verification.type = type + else: + email_verification.type = EmailVerificationType.Survey + if survey_id: email_verification.survey_id = survey_id From 11e14f9200d3468729fafa3a919fbd040f749684 Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:25:17 -0800 Subject: [PATCH 04/42] DESENG-484: Adding max age for cors (#2379) * DESENG-484: Adding max age for cors (#2377) --- CHANGELOG.MD | 3 +++ met-api/src/met_api/config.py | 6 ++++++ met-api/src/met_api/utils/util.py | 8 +++++++- met-api/tests/unit/utils/test_util_cors.py | 4 ++-- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 5d8efaba8..493ab6359 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,4 +1,7 @@ ## February 08, 2024 +- **Task**Cache CORS preflight responses with the browser for a given period of time [DESENG-484](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-484) + - Introduces a new configuration variable to specify the maximum age for Cross-Origin Resource Sharing (CORS) + - Modified the CORS preflight method to utilize this newly introduced variable. - **Task**Consolidate and re-write old migration files [DESENG-452](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-452) - Change some foreign key fields to nullable false in model files diff --git a/met-api/src/met_api/config.py b/met-api/src/met_api/config.py index e47321e4a..630c86e27 100644 --- a/met-api/src/met_api/config.py +++ b/met-api/src/met_api/config.py @@ -163,6 +163,12 @@ def SQLALCHEMY_DATABASE_URI(self) -> str: # CORS settings CORS_ORIGINS = os.getenv('CORS_ORIGINS', '').split(',') + # CORS_MAX_AGE defines the maximum age (in seconds) for Cross-Origin Resource Sharing (CORS) settings. + # This value is used to indicate how long the results of a preflight request (OPTIONS) can be cached + # by the client, reducing the frequency of preflight requests for the specified HTTP methods. + # Adjust this value based on security considerations.
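+    # For example, with CORS_MAX_AGE=600 the preflight (OPTIONS) response would include the
+    # standard 'Access-Control-Max-Age: 600' header, allowing browsers to reuse the cached
+    # preflight result for up to ten minutes before issuing another OPTIONS request.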
+ CORS_MAX_AGE = os.getenv('CORS_MAX_AGE', None) # Default: 0 seconds + EPIC_CONFIG = { 'ENABLED': env_truthy('EPIC_INTEGRATION_ENABLED'), 'JWT_OIDC_ISSUER': os.getenv('EPIC_JWT_OIDC_ISSUER'), diff --git a/met-api/src/met_api/utils/util.py b/met-api/src/met_api/utils/util.py index b6367d6fc..d6e8cbb85 100644 --- a/met-api/src/met_api/utils/util.py +++ b/met-api/src/met_api/utils/util.py @@ -29,12 +29,18 @@ def cors_preflight(methods): def wrapper(f): def options(self, *args, **kwargs): # pylint: disable=unused-argument - return {'Allow': 'GET, DELETE, PUT, POST'}, 200, { + headers = { + 'Allow': 'GET, DELETE, PUT, POST', 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': methods, 'Access-Control-Allow-Headers': 'Authorization, Content-Type, ' 'registries-trace-id, invitation_token' } + max_age = os.getenv('CORS_MAX_AGE') + if max_age is not None: + headers['Access-Control-Max-Age'] = str(max_age) + + return headers, 200, {} setattr(f, 'options', options) return f diff --git a/met-api/tests/unit/utils/test_util_cors.py b/met-api/tests/unit/utils/test_util_cors.py index 681b27f13..672229129 100644 --- a/met-api/tests/unit/utils/test_util_cors.py +++ b/met-api/tests/unit/utils/test_util_cors.py @@ -40,5 +40,5 @@ class TestCors(): pass rv = TestCors().options() # pylint: disable=no-member - assert rv[2]['Access-Control-Allow-Origin'] == '*' - assert rv[2]['Access-Control-Allow-Methods'] == methods + assert rv[0]['Access-Control-Allow-Origin'] == '*' + assert rv[0]['Access-Control-Allow-Methods'] == methods From 1bac2460707a5746e8ed2bac7302c1cd772d854b Mon Sep 17 00:00:00 2001 From: Ratheesh kumar R <108045773+ratheesh-aot@users.noreply.github.com> Date: Mon, 12 Feb 2024 12:46:44 -0800 Subject: [PATCH 05/42] DESENG-452 - Refactoring migrations (#2381) * DESENG-452: Delete old migration files * DESENG-452: Group migrations together * Updated Changelog * Unit test fix comment component length is max 10 * DESENG-452: Inserted initial data for Staff user table --- CHANGELOG.MD | 6 + ...added_url_path_column_to_feedback_table.py | 27 - .../versions/0329d7d10a5f_settings.py | 36 - met-api/migrations/versions/03ee1815f6a6_.py | 28 - met-api/migrations/versions/04e6c48187da_.py | 41 - met-api/migrations/versions/05d014ff7410_.py | 40 - .../08f69642b7ae_adding_widget_poll.py | 83 -- .../versions/0d863f773838_survey_block.py | 41 - .../0e043f976e2e_add_notes_to_submission.py | 38 - .../1113e0ad66c3_create_submission_table.py | 40 - met-api/migrations/versions/13985af9eca0_.py | 34 - .../versions/155c64768a99_create_cac_form.py | 44 -- ...ae2d586fb_add_generated_document_tables.py | 64 -- .../196b0abc23b6_tenant_id_in_user.py | 37 - met-api/migrations/versions/1c5883959156_.py | 35 - ...92c7b4_alter_engagement_add_is_internal.py | 28 - .../224b70277ac4_alter_commenttype.py | 28 - met-api/migrations/versions/2253a00e73bf_.py | 166 ---- met-api/migrations/versions/242c9f0364df_.py | 32 - ..._alter_engagement_add_created_by_upd_by.py | 37 - ...6609cb4db_update_report_settings_column.py | 46 -- .../2aa7554dde59_contact_adress_size.py | 34 - .../2b12a6cd987a_add_phases_widget_type.py | 33 - .../2b75eb893e6b_tenant_id_in_contact.py | 30 - .../31041fb90d53_membership_versioning.py | 37 - met-api/migrations/versions/326419c08f59_.py | 34 - .../36c315ec5801_add_encrypted_email.py | 92 --- met-api/migrations/versions/37176ea4708d_.py | 91 --- .../37176ea4708d_data_until_feb_09_2024.py | 536 +++++++++++++ ..._add_rich_description_to_subscribe_item.py | 37 - 
.../3e4dc76a96ab_added_the_timeline_widget.py | 44 -- .../versions/3f11e9145a08_events_table.py | 78 -- ...c_add_widget_id_to_widget_timeline_and_.py | 27 - .../45f89f245e3d_engagement_metadata.py | 37 - ...dc02be5_added_new_columns_to_user_table.py | 75 -- .../versions/47fc88fe0477_video_widget.py | 52 -- .../4c72047de4d3_add_staff_note_table.py | 41 - .../versions/4f5f91937f5c_merge_heads.py | 24 - .../5110026db916_create_feedback_table.py | 31 - .../versions/5423fd515e04_added_shapefile.py | 29 - .../versions/587badc69491_email_queue.py | 39 - .../5880bead8f03_email_verification_type.py | 30 - .../5a1258a76598_add_subscribe_to_project.py | 31 - .../642925969c53_added_documents_table.py | 43 -- met-api/migrations/versions/6764af39864e_.py | 27 - ...1704cb_create_widget_and_contact_tables.py | 79 -- .../6d3c33a79c5e_add_email_verification.py | 41 - .../779d9125c8cb_add_reviewer_type.py | 32 - .../7bf7394a517c_feedback_ratingtype.py | 36 - met-api/migrations/versions/7cb3da03c2a2_.py | 28 - .../versions/7d8897c412de_tenant_model.py | 78 -- met-api/migrations/versions/7ebd9ecfccdd_.py | 55 -- ...759_alter_submission_add_comment_status.py | 60 -- .../versions/8595172f9d96_add_map_type.py | 36 - .../88aba309bc23_add_engagement_slug.py | 70 -- ...f6e60d_alter_user_table_add_external_id.py | 32 - .../8ca063aafc01_create_comment_table.py | 56 -- .../8def759e43d9_add_participant_table.py | 93 --- .../versions/904c1ebca3e3_merge_heads.py | 24 - met-api/migrations/versions/9536f547cdd5_.py | 30 - .../versions/9714d7f8d7cc_size_increase.py | 30 - .../9f86fdcfb248_add_widget_type_document.py | 33 - met-api/migrations/versions/a1237c8a3df9_.py | 43 -- met-api/migrations/versions/a2d20b31e275_.py | 78 -- .../ac4a505ed1e3_add_feedback_source.py | 30 - .../b1196306955f_tracking_id_for_metadata.py | 28 - .../versions/b3b5c66cea4b_report_setting.py | 45 -- .../ba02399c821c_add_closed_status.py | 41 - ...25caf_adding_engagement_consent_message.py | 28 - .../versions/be3880132244_merge_heads.py | 24 - ...08_added_enum_value_for_timeline_widget.py | 61 -- ...c1af9f2b_alter_engagement_add_scheduled.py | 38 - .../cad222167ce7_memberships_added.py | 58 -- .../cb965cb4a3ad_update_comment_status.py | 27 - ...9ba77_alter_engagement_alter_banner_url.py | 30 - .../d152f85734f9_subscription_models.py | 40 - .../d2e7baa531ce_edit_subscription_models.py | 32 - .../versions/d5a6d9bb804b_merge_heads.py | 24 - ...f35c9_add_engagement_unpublished_status.py | 34 - .../versions/d86a682d7096_merge_heads.py | 24 - .../d9777850eb98_add_proponent_template.py | 56 -- met-api/migrations/versions/db3ffa0dd6ad_.py | 29 - met-api/migrations/versions/db737a0db061_.py | 43 -- .../versions/df73727dc6d9b7_add_sub_tabl.py | 55 -- met-api/migrations/versions/df842dc6d0b7_.py | 27 - .../e2d5d38220d9_add_revoked_membership.py | 46 -- .../e37d79be3a05_added_sort_for_widgets.py | 28 - .../e5d3bbb8d2f0_add_survey_hidden_column.py | 30 - .../versions/e69d7ac92afb_alter_submission.py | 47 -- ...ec0128056a33_rework_engagement_metadata.py | 164 ---- .../ec0128056a33_table_until_feb_09_2024.py | 709 ++++++++++++++++++ .../ec504565fab3_update_status_and_user.py | 53 -- met-api/migrations/versions/eef9cc71cca7_.py | 28 - .../f037908194df_kc_attribute_script.py | 39 - ...08c8d60_alter_comment_add_submission_id.py | 51 -- .../f40da1b8f3e0_initialize_user_status.py | 28 - ...f6f480b5b664_add_survey_template_column.py | 30 - met-api/migrations/versions/f99eb7f53041_.py | 34 - .../versions/fc570c0faace_survey_migration.py | 41 - 
.../fda10461892d_basemodel_column_change.py | 52 -- .../ffac8f5b4288_add_reject_reason.py | 30 - met-api/tests/utilities/factory_scenarios.py | 2 +- 102 files changed, 1252 insertions(+), 4331 deletions(-) delete mode 100644 met-api/migrations/versions/02ff8ecc6b91_added_url_path_column_to_feedback_table.py delete mode 100644 met-api/migrations/versions/0329d7d10a5f_settings.py delete mode 100644 met-api/migrations/versions/03ee1815f6a6_.py delete mode 100644 met-api/migrations/versions/04e6c48187da_.py delete mode 100644 met-api/migrations/versions/05d014ff7410_.py delete mode 100644 met-api/migrations/versions/08f69642b7ae_adding_widget_poll.py delete mode 100644 met-api/migrations/versions/0d863f773838_survey_block.py delete mode 100644 met-api/migrations/versions/0e043f976e2e_add_notes_to_submission.py delete mode 100644 met-api/migrations/versions/1113e0ad66c3_create_submission_table.py delete mode 100644 met-api/migrations/versions/13985af9eca0_.py delete mode 100644 met-api/migrations/versions/155c64768a99_create_cac_form.py delete mode 100644 met-api/migrations/versions/17bae2d586fb_add_generated_document_tables.py delete mode 100644 met-api/migrations/versions/196b0abc23b6_tenant_id_in_user.py delete mode 100644 met-api/migrations/versions/1c5883959156_.py delete mode 100644 met-api/migrations/versions/21e24b92c7b4_alter_engagement_add_is_internal.py delete mode 100644 met-api/migrations/versions/224b70277ac4_alter_commenttype.py delete mode 100644 met-api/migrations/versions/2253a00e73bf_.py delete mode 100644 met-api/migrations/versions/242c9f0364df_.py delete mode 100644 met-api/migrations/versions/2545d45bb29c_alter_engagement_add_created_by_upd_by.py delete mode 100644 met-api/migrations/versions/25e6609cb4db_update_report_settings_column.py delete mode 100644 met-api/migrations/versions/2aa7554dde59_contact_adress_size.py delete mode 100644 met-api/migrations/versions/2b12a6cd987a_add_phases_widget_type.py delete mode 100644 met-api/migrations/versions/2b75eb893e6b_tenant_id_in_contact.py delete mode 100644 met-api/migrations/versions/31041fb90d53_membership_versioning.py delete mode 100644 met-api/migrations/versions/326419c08f59_.py delete mode 100644 met-api/migrations/versions/36c315ec5801_add_encrypted_email.py delete mode 100644 met-api/migrations/versions/37176ea4708d_.py create mode 100644 met-api/migrations/versions/37176ea4708d_data_until_feb_09_2024.py delete mode 100644 met-api/migrations/versions/3a88de1e3e7f_add_rich_description_to_subscribe_item.py delete mode 100644 met-api/migrations/versions/3e4dc76a96ab_added_the_timeline_widget.py delete mode 100644 met-api/migrations/versions/3f11e9145a08_events_table.py delete mode 100644 met-api/migrations/versions/4114001e1a4c_add_widget_id_to_widget_timeline_and_.py delete mode 100644 met-api/migrations/versions/45f89f245e3d_engagement_metadata.py delete mode 100644 met-api/migrations/versions/46490dc02be5_added_new_columns_to_user_table.py delete mode 100644 met-api/migrations/versions/47fc88fe0477_video_widget.py delete mode 100644 met-api/migrations/versions/4c72047de4d3_add_staff_note_table.py delete mode 100644 met-api/migrations/versions/4f5f91937f5c_merge_heads.py delete mode 100644 met-api/migrations/versions/5110026db916_create_feedback_table.py delete mode 100644 met-api/migrations/versions/5423fd515e04_added_shapefile.py delete mode 100644 met-api/migrations/versions/587badc69491_email_queue.py delete mode 100644 met-api/migrations/versions/5880bead8f03_email_verification_type.py delete mode 100644 
met-api/migrations/versions/5a1258a76598_add_subscribe_to_project.py delete mode 100644 met-api/migrations/versions/642925969c53_added_documents_table.py delete mode 100644 met-api/migrations/versions/6764af39864e_.py delete mode 100644 met-api/migrations/versions/6ce7831704cb_create_widget_and_contact_tables.py delete mode 100644 met-api/migrations/versions/6d3c33a79c5e_add_email_verification.py delete mode 100644 met-api/migrations/versions/779d9125c8cb_add_reviewer_type.py delete mode 100644 met-api/migrations/versions/7bf7394a517c_feedback_ratingtype.py delete mode 100644 met-api/migrations/versions/7cb3da03c2a2_.py delete mode 100644 met-api/migrations/versions/7d8897c412de_tenant_model.py delete mode 100644 met-api/migrations/versions/7ebd9ecfccdd_.py delete mode 100644 met-api/migrations/versions/7faee53e6759_alter_submission_add_comment_status.py delete mode 100644 met-api/migrations/versions/8595172f9d96_add_map_type.py delete mode 100644 met-api/migrations/versions/88aba309bc23_add_engagement_slug.py delete mode 100644 met-api/migrations/versions/8ab640f6e60d_alter_user_table_add_external_id.py delete mode 100644 met-api/migrations/versions/8ca063aafc01_create_comment_table.py delete mode 100644 met-api/migrations/versions/8def759e43d9_add_participant_table.py delete mode 100644 met-api/migrations/versions/904c1ebca3e3_merge_heads.py delete mode 100644 met-api/migrations/versions/9536f547cdd5_.py delete mode 100644 met-api/migrations/versions/9714d7f8d7cc_size_increase.py delete mode 100644 met-api/migrations/versions/9f86fdcfb248_add_widget_type_document.py delete mode 100644 met-api/migrations/versions/a1237c8a3df9_.py delete mode 100644 met-api/migrations/versions/a2d20b31e275_.py delete mode 100644 met-api/migrations/versions/ac4a505ed1e3_add_feedback_source.py delete mode 100644 met-api/migrations/versions/b1196306955f_tracking_id_for_metadata.py delete mode 100644 met-api/migrations/versions/b3b5c66cea4b_report_setting.py delete mode 100644 met-api/migrations/versions/ba02399c821c_add_closed_status.py delete mode 100644 met-api/migrations/versions/bd0eb0d25caf_adding_engagement_consent_message.py delete mode 100644 met-api/migrations/versions/be3880132244_merge_heads.py delete mode 100644 met-api/migrations/versions/c09e77fde608_added_enum_value_for_timeline_widget.py delete mode 100644 met-api/migrations/versions/c19bc1af9f2b_alter_engagement_add_scheduled.py delete mode 100644 met-api/migrations/versions/cad222167ce7_memberships_added.py delete mode 100644 met-api/migrations/versions/cb965cb4a3ad_update_comment_status.py delete mode 100644 met-api/migrations/versions/d0f92ae9ba77_alter_engagement_alter_banner_url.py delete mode 100644 met-api/migrations/versions/d152f85734f9_subscription_models.py delete mode 100644 met-api/migrations/versions/d2e7baa531ce_edit_subscription_models.py delete mode 100644 met-api/migrations/versions/d5a6d9bb804b_merge_heads.py delete mode 100644 met-api/migrations/versions/d822eacf35c9_add_engagement_unpublished_status.py delete mode 100644 met-api/migrations/versions/d86a682d7096_merge_heads.py delete mode 100644 met-api/migrations/versions/d9777850eb98_add_proponent_template.py delete mode 100644 met-api/migrations/versions/db3ffa0dd6ad_.py delete mode 100644 met-api/migrations/versions/db737a0db061_.py delete mode 100644 met-api/migrations/versions/df73727dc6d9b7_add_sub_tabl.py delete mode 100644 met-api/migrations/versions/df842dc6d0b7_.py delete mode 100644 met-api/migrations/versions/e2d5d38220d9_add_revoked_membership.py delete mode 
100644 met-api/migrations/versions/e37d79be3a05_added_sort_for_widgets.py delete mode 100644 met-api/migrations/versions/e5d3bbb8d2f0_add_survey_hidden_column.py delete mode 100644 met-api/migrations/versions/e69d7ac92afb_alter_submission.py delete mode 100644 met-api/migrations/versions/ec0128056a33_rework_engagement_metadata.py create mode 100644 met-api/migrations/versions/ec0128056a33_table_until_feb_09_2024.py delete mode 100644 met-api/migrations/versions/ec504565fab3_update_status_and_user.py delete mode 100644 met-api/migrations/versions/eef9cc71cca7_.py delete mode 100644 met-api/migrations/versions/f037908194df_kc_attribute_script.py delete mode 100644 met-api/migrations/versions/f2b3f08c8d60_alter_comment_add_submission_id.py delete mode 100644 met-api/migrations/versions/f40da1b8f3e0_initialize_user_status.py delete mode 100644 met-api/migrations/versions/f6f480b5b664_add_survey_template_column.py delete mode 100644 met-api/migrations/versions/f99eb7f53041_.py delete mode 100644 met-api/migrations/versions/fc570c0faace_survey_migration.py delete mode 100644 met-api/migrations/versions/fda10461892d_basemodel_column_change.py delete mode 100644 met-api/migrations/versions/ffac8f5b4288_add_reject_reason.py diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 493ab6359..7aefb3db2 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,9 @@ +## February 09, 2024 +- **Task**Consolidate and re-write old migration files [DESENG-452](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-452) + - Deleted old migration files + - Created [ec0128056a33_table_until_feb_09_2024.py](met-api/migrations/versions/ec0128056a33_table_until_feb_09_2024.py) with all tables until feb 09 2024 + - Created [37176ea4708d_data_until_feb_09_2024.py](met-api/migrations/versions/37176ea4708d_data_until_feb_09_2024.py) with all initial data until feb 09 2024 + ## February 08, 2024 - **Task**Cache CORS preflight responses with the browser for a given period of time [DESENG-484](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-484) - Introduces a new configuration variable to specify the maximum age for Cross-Origin Resource Sharing (CORS) diff --git a/met-api/migrations/versions/02ff8ecc6b91_added_url_path_column_to_feedback_table.py b/met-api/migrations/versions/02ff8ecc6b91_added_url_path_column_to_feedback_table.py deleted file mode 100644 index 138f01cf5..000000000 --- a/met-api/migrations/versions/02ff8ecc6b91_added_url_path_column_to_feedback_table.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Added URL path column to feedback table. - -Revision ID: 02ff8ecc6b91 -Revises: 25e6609cb4db -Create Date: 2023-11-10 10:33:06.780841 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '02ff8ecc6b91' -down_revision = '25e6609cb4db' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('feedback', sa.Column('submission_path', sa.String())) - # ### end Alembic commands ### - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('feedback', 'submission_path') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/0329d7d10a5f_settings.py b/met-api/migrations/versions/0329d7d10a5f_settings.py deleted file mode 100644 index eed7b9cb2..000000000 --- a/met-api/migrations/versions/0329d7d10a5f_settings.py +++ /dev/null @@ -1,36 +0,0 @@ -""" Add engagement settings table - -Revision ID: 0329d7d10a5f -Revises: df842dc6d0b7 -Create Date: 2023-07-21 13:51:38.752604 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '0329d7d10a5f' -down_revision = 'df842dc6d0b7' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('engagement_settings', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('send_report', sa.Boolean(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('engagement_id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('engagement_settings') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/03ee1815f6a6_.py b/met-api/migrations/versions/03ee1815f6a6_.py deleted file mode 100644 index 085b3b392..000000000 --- a/met-api/migrations/versions/03ee1815f6a6_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add avatar_filename to contact - -Revision ID: 03ee1815f6a6 -Revises: 9536f547cdd5 -Create Date: 2022-11-10 14:21:02.844093 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '03ee1815f6a6' -down_revision = '9536f547cdd5' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('contact', sa.Column('avatar_filename', sa.String(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('contact', 'avatar_filename') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/04e6c48187da_.py b/met-api/migrations/versions/04e6c48187da_.py deleted file mode 100644 index bd77fb09b..000000000 --- a/met-api/migrations/versions/04e6c48187da_.py +++ /dev/null @@ -1,41 +0,0 @@ -""" - -Revision ID: 04e6c48187da -Revises: f40da1b8f3e0 -Create Date: 2023-08-18 12:45:30.620941 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '04e6c48187da' -down_revision = 'f40da1b8f3e0' -branch_labels = None -depends_on = None - -# Define the Enum type for feedback status -feedback_status_enum = sa.Enum( - 'Unreviewed', 'Archived', name='feedbackstatustype') - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - - # Create the Enum type in the database - feedback_status_enum.create(op.get_bind()) - - op.add_column('feedback', sa.Column('status', sa.Enum( - 'Unreviewed', 'Archived', name='feedbackstatustype'), nullable=True)) - op.execute('UPDATE "feedback" SET status = \'Unreviewed\'') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('feedback', 'status') - - # Drop the Enum type from the database - feedback_status_enum.drop(op.get_bind()) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/05d014ff7410_.py b/met-api/migrations/versions/05d014ff7410_.py deleted file mode 100644 index cd43a62f8..000000000 --- a/met-api/migrations/versions/05d014ff7410_.py +++ /dev/null @@ -1,40 +0,0 @@ -"""empty message - -Revision ID: 05d014ff7410 -Revises: 8ab640f6e60d -Create Date: 2022-06-20 11:21:12.300505 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '05d014ff7410' -down_revision = '8ab640f6e60d' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('content', sa.Text(), nullable=False, server_default="")) - op.add_column('engagement', sa.Column('rich_content', postgresql.JSON(astext_type=sa.Text()), nullable=False, server_default="{}")) - op.add_column('engagement', sa.Column('banner_url', sa.String(), nullable=True)) - op.alter_column('engagement', 'description', - existing_type=sa.VARCHAR(), - nullable=False, - server_default="" - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('engagement', 'description', - existing_type=sa.VARCHAR(), - nullable=True) - op.drop_column('engagement', 'banner_url') - op.drop_column('engagement', 'rich_content') - op.drop_column('engagement', 'content') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/08f69642b7ae_adding_widget_poll.py b/met-api/migrations/versions/08f69642b7ae_adding_widget_poll.py deleted file mode 100644 index 4f87cbe95..000000000 --- a/met-api/migrations/versions/08f69642b7ae_adding_widget_poll.py +++ /dev/null @@ -1,83 +0,0 @@ -"""adding_widget_poll - -Revision ID: 08f69642b7ae -Revises: bd0eb0d25caf -Create Date: 2024-01-16 14:25:07.611485 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '08f69642b7ae' -down_revision = 'bd0eb0d25caf' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('widget_polls', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('description', sa.String(length=2048), nullable=True), - sa.Column('status', sa.Enum('active', 'inactive', name='poll_status'), nullable=True), - sa.Column('widget_id', sa.Integer(), nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('poll_answers', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('answer_text', sa.String(length=255), nullable=False), - sa.Column('poll_id', sa.Integer(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['poll_id'], ['widget_polls.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('poll_responses', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('participant_id', sa.String(length=255), nullable=False), - sa.Column('selected_answer_id', sa.Integer(), nullable=False), - sa.Column('poll_id', sa.Integer(), nullable=False), - sa.Column('widget_id', sa.Integer(), nullable=False), - sa.Column('is_deleted', sa.Boolean(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['poll_id'], ['widget_polls.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['selected_answer_id'], ['poll_answers.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 10, 'name': 'Poll', 'description': 'The Poll Widget enables real-time polling and feedback collection from public.'} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('poll_responses') - op.drop_table('poll_answers') - op.drop_table('widget_polls') - - conn = op.get_bind() - - conn.execute('DELETE FROM widget_type WHERE id=10') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/0d863f773838_survey_block.py b/met-api/migrations/versions/0d863f773838_survey_block.py deleted file mode 100644 index eb264dbff..000000000 --- a/met-api/migrations/versions/0d863f773838_survey_block.py +++ /dev/null @@ -1,41 +0,0 @@ -"""survey_block - -Revision ID: 0d863f773838 -Revises: 6764af39864e -Create Date: 2022-12-21 20:23:24.907724 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '0d863f773838' -down_revision = '6764af39864e' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('engagement_status_block', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=True), - sa.Column('survey_status', sa.Enum('Upcoming', 'Open', 'Closed', name='submissionstatus'), nullable=False), - sa.Column('block_text', postgresql.JSON(astext_type=sa.Text()), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('engagement_id', 'survey_status', name='unique_engagement_status_block') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('engagement_status_block') - op.execute('DROP TYPE submissionstatus;') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/0e043f976e2e_add_notes_to_submission.py b/met-api/migrations/versions/0e043f976e2e_add_notes_to_submission.py deleted file mode 100644 index ecfd727ce..000000000 --- a/met-api/migrations/versions/0e043f976e2e_add_notes_to_submission.py +++ /dev/null @@ -1,38 +0,0 @@ -"""add_notes_to_submission - -Revision ID: 0e043f976e2e -Revises: 17bae2d586fb -Create Date: 2023-01-10 12:18:43.395103 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime - - -# revision identifiers, used by Alembic. -revision = '0e043f976e2e' -down_revision = '17bae2d586fb' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('submission', sa.Column('notify_email', sa.Boolean(), nullable=True)) - comment_status_table = sa.table('comment_status', - sa.Column('id', sa.Integer()), - sa.Column('status_name', sa.String(length=50)), - sa.Column('description', sa.String(length=50)), - sa.Column('created_date', sa.DateTime()), - sa.Column('updated_date', sa.DateTime())) - op.bulk_insert(comment_status_table, [ - {'id': 4, 'status_name': 'Needs further review', 'description': 'Comment needs further review', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()}, - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('submission', 'notify_email') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/1113e0ad66c3_create_submission_table.py b/met-api/migrations/versions/1113e0ad66c3_create_submission_table.py deleted file mode 100644 index c34abbc86..000000000 --- a/met-api/migrations/versions/1113e0ad66c3_create_submission_table.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Create submission table - -Revision ID: 1113e0ad66c3 -Revises: fc570c0faace -Create Date: 2022-07-06 15:33:00.098038 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '1113e0ad66c3' -down_revision = 'fc570c0faace' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('submission', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('submission_json', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False), - sa.Column('survey_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['user_id'], ['user.id']), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('submission') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/13985af9eca0_.py b/met-api/migrations/versions/13985af9eca0_.py deleted file mode 100644 index b9b6dbada..000000000 --- a/met-api/migrations/versions/13985af9eca0_.py +++ /dev/null @@ -1,34 +0,0 @@ -""" - -Revision ID: 13985af9eca0 -Revises: 21e24b92c7b4 -Create Date: 2023-05-17 09:51:00.350393 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision = '13985af9eca0' -down_revision = '21e24b92c7b4' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('tenant', sa.Column('title', sa.String(length=30), nullable=True)) - op.add_column('tenant', sa.Column('logo_url', sa.String(length=300), nullable=True)) - op.alter_column('tenant', 'description', - existing_type=sa.String(100), - type_=sa.String(300)) - op.execute('UPDATE tenant SET title = \'Modern Engagement\';') - op.execute('UPDATE tenant SET description = \'British Columbia\'\'s environmental assessment process provides opportunities for Indigenous Nations, government agencies and the public to influence the outcome of environmental assessments in British Columbia.\' WHERE short_name = \'EAO\';') - op.alter_column('tenant', 'title', - existing_type=sa.String(), - nullable=False) - - -def downgrade(): - op.drop_column('tenant', 'logo_url') - op.drop_column('tenant', 'title') diff --git a/met-api/migrations/versions/155c64768a99_create_cac_form.py b/met-api/migrations/versions/155c64768a99_create_cac_form.py deleted file mode 100644 index 33ae0141d..000000000 --- a/met-api/migrations/versions/155c64768a99_create_cac_form.py +++ /dev/null @@ -1,44 +0,0 @@ -""" create cac form table - -Revision ID: 155c64768a99 -Revises: 9714d7f8d7cc -Create Date: 2023-09-05 16:23:11.567528 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '155c64768a99' -down_revision = '9714d7f8d7cc' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('cac_form', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('tenant_id', sa.Integer(), nullable=True), - sa.Column('understand', sa.Boolean(), nullable=False), - sa.Column('terms_of_reference', sa.Boolean(), nullable=False), - sa.Column('first_name', sa.String(length=50), nullable=False), - sa.Column('last_name', sa.String(length=50), nullable=False), - sa.Column('city', sa.String(length=50), nullable=False), - sa.Column('email', sa.String(length=50), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('cac_form') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/17bae2d586fb_add_generated_document_tables.py b/met-api/migrations/versions/17bae2d586fb_add_generated_document_tables.py deleted file mode 100644 index 1a16fc3e4..000000000 --- a/met-api/migrations/versions/17bae2d586fb_add_generated_document_tables.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Add generated document models - -Revision ID: 17bae2d586fb -Revises: 904c1ebca3e3 -Create Date: 2022-12-27 16:16:50.850377 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime - - -# revision identifiers, used by Alembic. 
-revision = '17bae2d586fb' -down_revision = '904c1ebca3e3' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - document_type = op.create_table('generated_document_type', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=30), nullable=False), - sa.Column('description', sa.String(length=100), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - document_template = op.create_table('generated_document_template', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('type_id', sa.Integer(), nullable=False), - sa.Column('hash_code', sa.String(length=64), nullable=True), - sa.Column('extension', sa.String(length=10), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['type_id'], ['generated_document_type.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('hash_code') - ) - op.drop_index('ix_user_username', table_name='met_users') - op.create_index(op.f('ix_met_users_username'), 'met_users', ['username'], unique=False) - - op.bulk_insert(document_type, [ - {'id': 1, 'name': 'comment_sheet', 'description': 'Comments export for staff', "created_date": datetime.utcnow()} - ]) - - op.bulk_insert(document_template, [ - {'id': 1, 'type_id': 1, 'hash_code': None, "extension": "xlsx", "created_date": datetime.utcnow()} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f('ix_met_users_username'), table_name='met_users') - op.create_index('ix_user_username', 'met_users', ['username'], unique=False) - op.drop_table('generated_document_template') - op.drop_table('generated_document_type') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/196b0abc23b6_tenant_id_in_user.py b/met-api/migrations/versions/196b0abc23b6_tenant_id_in_user.py deleted file mode 100644 index 260f8f3fb..000000000 --- a/met-api/migrations/versions/196b0abc23b6_tenant_id_in_user.py +++ /dev/null @@ -1,37 +0,0 @@ -"""tenant id in user - -Revision ID: 196b0abc23b6 -Revises: 13985af9eca0 -Create Date: 2023-05-23 05:09:51.963357 - -""" -import sqlalchemy as sa -from alembic import op -from flask import current_app - -from met_api.models import Tenant as TenantModel - -# revision identifiers, used by Alembic. -revision = '196b0abc23b6' -down_revision = '13985af9eca0' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('met_users', sa.Column('tenant_id', sa.Integer(), nullable=True)) - op.create_foreign_key('met_users_tenant_fk', 'met_users', 'tenant', ['tenant_id'], ['id']) - op.execute("commit") - - default_short_name = current_app.config.get('DEFAULT_TENANT_SHORT_NAME') - update_stmt = sa.text('UPDATE met_users SET tenant_id = (SELECT tenant.id FROM tenant WHERE short_name = :default_short_name)') - op.execute(update_stmt.params(default_short_name=default_short_name)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint('met_users_tenant_fk', 'met_users', type_='foreignkey') - op.drop_column('met_users', 'tenant_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/1c5883959156_.py b/met-api/migrations/versions/1c5883959156_.py deleted file mode 100644 index dfbafa148..000000000 --- a/met-api/migrations/versions/1c5883959156_.py +++ /dev/null @@ -1,35 +0,0 @@ -""" - -Revision ID: 1c5883959156 -Revises: f037908194df -Create Date: 2023-08-29 20:37:58.652800 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '1c5883959156' -down_revision = 'f037908194df' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('feedback', 'status', - existing_type=postgresql.ENUM( - 'Unreviewed', 'Archived', name='feedbackstatustype'), - nullable=False) - op.execute('UPDATE "feedback" SET status = \'Unreviewed\'') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('feedback', 'status', - existing_type=postgresql.ENUM( - 'Unreviewed', 'Archived', name='feedbackstatustype'), - nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/21e24b92c7b4_alter_engagement_add_is_internal.py b/met-api/migrations/versions/21e24b92c7b4_alter_engagement_add_is_internal.py deleted file mode 100644 index e38347439..000000000 --- a/met-api/migrations/versions/21e24b92c7b4_alter_engagement_add_is_internal.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Alter engagement Add is_internal - -Revision ID: 21e24b92c7b4 -Revises: 2b75eb893e6b -Create Date: 2023-05-12 13:26:05.950118 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '21e24b92c7b4' -down_revision = '2b75eb893e6b' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('engagement', sa.Column('is_internal', sa.Boolean(), nullable=True)) - op.execute('UPDATE engagement SET is_internal = false') - op.alter_column('engagement', 'is_internal', - existing_type=sa.Boolean(), - nullable=False) - - -def downgrade(): - op.drop_column('engagement', 'is_internal') diff --git a/met-api/migrations/versions/224b70277ac4_alter_commenttype.py b/met-api/migrations/versions/224b70277ac4_alter_commenttype.py deleted file mode 100644 index 9ad62451f..000000000 --- a/met-api/migrations/versions/224b70277ac4_alter_commenttype.py +++ /dev/null @@ -1,28 +0,0 @@ -"""alter commenttype - -Revision ID: 224b70277ac4 -Revises: 5110026db916 -Create Date: 2022-10-05 11:00:13.349968 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '224b70277ac4' -down_revision = '5110026db916' -branch_labels = None -depends_on = None - - -def upgrade(): - op.alter_column('feedback', 'comment_type', type_=sa.Text()) - op.execute('DROP TYPE commenttype;') - op.execute('CREATE TYPE commenttype AS ENUM (\'Issue\', \'Idea\', \'Else\', \'NONE\');') - op.alter_column('feedback', 'comment_type', type_=sa.Enum('Issue', 'Idea', 'Else', 'NONE', name='commenttype'), postgresql_using='comment_type::commenttype') - - -def downgrade(): - # alter type will not be reverted since it does not impact the structure. - pass diff --git a/met-api/migrations/versions/2253a00e73bf_.py b/met-api/migrations/versions/2253a00e73bf_.py deleted file mode 100644 index 363874994..000000000 --- a/met-api/migrations/versions/2253a00e73bf_.py +++ /dev/null @@ -1,166 +0,0 @@ -""" - -Revision ID: 2253a00e73bf -Revises: cad222167ce7 -Create Date: 2023-02-06 10:57:24.811178 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '2253a00e73bf' -down_revision = 'cad222167ce7' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('UPDATE comment SET created_date = CURRENT_TIMESTAMP WHERE created_date IS NULL;') - op.alter_column('comment', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.add_column('comment_status', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('comment_status', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.alter_column('comment_status', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('contact', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('contact', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('contact', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('email_verification', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.add_column('engagement_status', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('engagement_status', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.alter_column('engagement_status', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.add_column('feedback', sa.Column('updated_date', sa.DateTime(), nullable=True)) - op.add_column('feedback', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('feedback', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.alter_column('feedback', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.add_column('membership_status_codes', sa.Column('created_date', sa.DateTime(), nullable=True)) - op.add_column('membership_status_codes', sa.Column('updated_date', sa.DateTime(), nullable=True)) - op.add_column('membership_status_codes', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('membership_status_codes', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.add_column('met_users', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('met_users', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.alter_column('met_users', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - 
op.alter_column('submission', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('survey', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('widget', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('widget', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('widget', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('widget_item', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('widget_item', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('widget_item', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.add_column('widget_type', sa.Column('created_date', sa.DateTime(), nullable=True)) - op.add_column('widget_type', sa.Column('updated_date', sa.DateTime(), nullable=True)) - op.add_column('widget_type', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('widget_type', sa.Column('updated_by', sa.String(length=50), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('widget_type', 'updated_by') - op.drop_column('widget_type', 'created_by') - op.drop_column('widget_type', 'updated_date') - op.drop_column('widget_type', 'created_date') - op.alter_column('widget_item', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('widget_item', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('widget_item', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('widget', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('widget', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('widget', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('survey', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('submission', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('met_users', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.drop_column('met_users', 'updated_by') - op.drop_column('met_users', 'created_by') - op.drop_column('membership_status_codes', 'updated_by') - op.drop_column('membership_status_codes', 'created_by') - op.drop_column('membership_status_codes', 'updated_date') - op.drop_column('membership_status_codes', 'created_date') - op.alter_column('feedback', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.drop_column('feedback', 'updated_by') - op.drop_column('feedback', 'created_by') - op.drop_column('feedback', 'updated_date') - op.alter_column('engagement_status', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.drop_column('engagement_status', 'updated_by') - op.drop_column('engagement_status', 'created_by') - op.alter_column('email_verification', 'type', - existing_type=postgresql.ENUM('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), - nullable=True) - op.alter_column('email_verification', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('contact', 'updated_by', - existing_type=sa.VARCHAR(length=50), - 
nullable=False) - op.alter_column('contact', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('contact', 'updated_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('comment_status', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.drop_column('comment_status', 'updated_by') - op.drop_column('comment_status', 'created_by') - op.alter_column('comment', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/242c9f0364df_.py b/met-api/migrations/versions/242c9f0364df_.py deleted file mode 100644 index 64becace5..000000000 --- a/met-api/migrations/versions/242c9f0364df_.py +++ /dev/null @@ -1,32 +0,0 @@ -"""add subscribe widget type - -Revision ID: 242c9f0364df -Revises: 17bae2d586fb -Create Date: 2023-01-12 12:17:27.360418 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '242c9f0364df' -down_revision = '17bae2d586fb' -branch_labels = None -depends_on = None - - -def upgrade(): - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 4, 'name': 'Subscribe', 'description': 'Allows users to subscribe to an engagement'} - ]) - - -def downgrade(): - conn = op.get_bind() - conn.execute('DELETE FROM widget_type WHERE id=4') diff --git a/met-api/migrations/versions/2545d45bb29c_alter_engagement_add_created_by_upd_by.py b/met-api/migrations/versions/2545d45bb29c_alter_engagement_add_created_by_upd_by.py deleted file mode 100644 index d282379b3..000000000 --- a/met-api/migrations/versions/2545d45bb29c_alter_engagement_add_created_by_upd_by.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Alter engagement add created_by upd_by - -Revision ID: 2545d45bb29c -Revises: 05d014ff7410 -Create Date: 2022-06-24 09:28:59.551336 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '2545d45bb29c' -down_revision = '05d014ff7410' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('engagement', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.execute('UPDATE engagement SET created_by = user_id, updated_by = user_id WHERE created_by IS NULL') - op.drop_constraint('engagement_user_id_fkey', 'engagement', type_='foreignkey') - op.alter_column('engagement', 'created_by', nullable=False) - op.alter_column('engagement', 'updated_by', nullable=False) - op.drop_column('engagement', 'user_id') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('engagement', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.create_foreign_key('engagement_user_id_fkey', 'engagement', 'user', ['user_id'], ['id'], ondelete='CASCADE') - op.drop_column('engagement', 'updated_by') - op.drop_column('engagement', 'created_by') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/25e6609cb4db_update_report_settings_column.py b/met-api/migrations/versions/25e6609cb4db_update_report_settings_column.py deleted file mode 100644 index 1553b0993..000000000 --- a/met-api/migrations/versions/25e6609cb4db_update_report_settings_column.py +++ /dev/null @@ -1,46 +0,0 @@ -"""update_report_settings_column - -Revision ID: 25e6609cb4db -Revises: d5a6d9bb804b -Create Date: 2023-10-04 15:58:26.818989 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '25e6609cb4db' -down_revision = 'd5a6d9bb804b' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('report_setting', 'question_id', - existing_type=sa.String(250), - type_=sa.Text()) - op.alter_column('report_setting', 'question_key', - existing_type=sa.String(250), - type_=sa.Text()) - op.alter_column('report_setting', 'question_type', - existing_type=sa.String(250), - type_=sa.Text()) - op.alter_column('report_setting', 'question', - existing_type=sa.String(250), - type_=sa.Text()) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('report_setting', 'question_id', - type_=sa.String(250)) - op.alter_column('report_setting', 'question_key', - type_=sa.String(250)) - op.alter_column('report_setting', 'question_type', - type_=sa.String(250)) - op.alter_column('report_setting', 'question', - type_=sa.String(250)) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/2aa7554dde59_contact_adress_size.py b/met-api/migrations/versions/2aa7554dde59_contact_adress_size.py deleted file mode 100644 index fbd7ff63f..000000000 --- a/met-api/migrations/versions/2aa7554dde59_contact_adress_size.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Increase text size for contact address - -Revision ID: 2aa7554dde59 -Revises: 6ce7831704cb -Create Date: 2022-10-24 08:22:19.064120 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '2aa7554dde59' -down_revision = '6ce7831704cb' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('contact', 'address', - existing_type=sa.VARCHAR(length=50), - type_=sa.String(length=150), - existing_nullable=True) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('contact', 'address', - existing_type=sa.String(length=150), - type_=sa.VARCHAR(length=50), - existing_nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/2b12a6cd987a_add_phases_widget_type.py b/met-api/migrations/versions/2b12a6cd987a_add_phases_widget_type.py deleted file mode 100644 index b84fb46b7..000000000 --- a/met-api/migrations/versions/2b12a6cd987a_add_phases_widget_type.py +++ /dev/null @@ -1,33 +0,0 @@ -"""add phases widget type - -Revision ID: 2b12a6cd987a -Revises: 642925969c53 -Create Date: 2022-11-23 15:34:53.240556 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '2b12a6cd987a' -down_revision = '642925969c53' -branch_labels = None -depends_on = None - - -def upgrade(): - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 3, 'name': 'Phases', 'description': 'Displays information about the engagement phase'} - ]) - - -def downgrade(): - conn = op.get_bind() - - conn.execute('DELETE FROM widget_type WHERE id=3') \ No newline at end of file diff --git a/met-api/migrations/versions/2b75eb893e6b_tenant_id_in_contact.py b/met-api/migrations/versions/2b75eb893e6b_tenant_id_in_contact.py deleted file mode 100644 index a16dddc6c..000000000 --- a/met-api/migrations/versions/2b75eb893e6b_tenant_id_in_contact.py +++ /dev/null @@ -1,30 +0,0 @@ -"""tenant id in contact - -Revision ID: 2b75eb893e6b -Revises: f6f480b5b664 -Create Date: 2023-05-07 21:24:21.296063 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '2b75eb893e6b' -down_revision = 'f6f480b5b664' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('contact', sa.Column('tenant_id', sa.Integer(), nullable=True)) - op.create_foreign_key('contact_tenant_fk', 'contact', 'tenant', ['tenant_id'], ['id']) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint('contact_tenant_fk', 'contact', type_='foreignkey') - op.drop_column('contact', 'tenant_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/31041fb90d53_membership_versioning.py b/met-api/migrations/versions/31041fb90d53_membership_versioning.py deleted file mode 100644 index c2e206ec8..000000000 --- a/met-api/migrations/versions/31041fb90d53_membership_versioning.py +++ /dev/null @@ -1,37 +0,0 @@ -""" add versioning to membership table - -Revision ID: 31041fb90d53 -Revises: e2d5d38220d9 -Create Date: 2023-08-09 14:18:45.335397 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '31041fb90d53' -down_revision = 'e2d5d38220d9' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('membership', sa.Column('version', sa.Integer(), nullable=True)) - op.add_column('membership', sa.Column('is_latest', sa.Boolean(), nullable=True)) - - # Update existing rows with default values - op.execute("UPDATE membership SET version = 1") - op.execute("UPDATE membership SET is_latest = TRUE") - - # Change columns to non-nullable - op.alter_column('membership', 'version', nullable=False) - op.alter_column('membership', 'is_latest', nullable=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('membership', 'is_latest') - op.drop_column('membership', 'version') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/326419c08f59_.py b/met-api/migrations/versions/326419c08f59_.py deleted file mode 100644 index 64ba7840c..000000000 --- a/met-api/migrations/versions/326419c08f59_.py +++ /dev/null @@ -1,34 +0,0 @@ -"""empty message - -Revision ID: 326419c08f59 -Revises: a2d20b31e275 -Create Date: 2022-05-10 16:04:31.565892 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '326419c08f59' -down_revision = 'a2d20b31e275' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('user_id', sa.Integer(), nullable=True)) - op.create_foreign_key(None, 'engagement', 'user', ['user_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key(None, 'engagement', 'engagement_status', ['status_id'], ['id'], ondelete='CASCADE') - op.drop_column('engagement', 'created_by') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('created_by', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.drop_constraint(None, 'engagement', type_='foreignkey') - op.drop_constraint(None, 'engagement', type_='foreignkey') - op.drop_column('engagement', 'user_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/36c315ec5801_add_encrypted_email.py b/met-api/migrations/versions/36c315ec5801_add_encrypted_email.py deleted file mode 100644 index ee83c0c57..000000000 --- a/met-api/migrations/versions/36c315ec5801_add_encrypted_email.py +++ /dev/null @@ -1,92 +0,0 @@ -"""add encrypted_email - -Revision ID: 36c315ec5801 -Revises: 587badc69491 -Create Date: 2023-05-30 16:21:19.298002 - -""" -from alembic import op -import sqlalchemy as sa -import sqlalchemy_utils - -# revision identifiers, used by Alembic. 
-revision = '36c315ec5801' -down_revision = '587badc69491' -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_constraint('membership_user_id_fkey', 'membership', type_='foreignkey') - op.drop_index('ix_met_users_username', table_name='met_users') - op.drop_constraint('user_external_id_key', 'met_users', type_='unique') - op.drop_constraint('user_status_fk', 'met_users', type_='foreignkey') - - op.create_table('staff_users', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('first_name', sa.String(length=50), nullable=True), - sa.Column('middle_name', sa.String(length=50), nullable=True), - sa.Column('last_name', sa.String(length=50), nullable=True), - sa.Column('username', sa.String(length=100), nullable=True), - sa.Column('email_address', sa.String(length=100), nullable=True), - sa.Column('contact_number', sa.String(length=50), nullable=True), - sa.Column('external_id', sa.String(length=50), nullable=False), - sa.Column('status_id', sa.Integer(), nullable=True), - sa.Column('tenant_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['status_id'], ['user_status.id'], ), - sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('external_id') - ) - op.execute('INSERT INTO staff_users \ - (id, created_date, updated_date, first_name, middle_name, last_name, username, contact_number, \ - external_id, status_id, tenant_id, created_by, updated_by) SELECT id, \ - created_date, updated_date, first_name, middle_name, last_name, username, contact_number,\ - external_id, status_id, tenant_id, created_by, updated_by FROM met_users WHERE username IS NOT NULL OR id = 1;') - op.execute('DELETE FROM met_users WHERE id = 1;') - op.execute('DELETE FROM met_users WHERE username is not null;') - op.execute('SELECT setval(\'staff_users_id_seq\', (SELECT MAX(id) + 1 FROM staff_users), true);') - op.create_index(op.f('ix_staff_users_username'), 'staff_users', ['username'], unique=True) - op.create_foreign_key('membership_user_id_fkey', 'membership', 'staff_users', ['user_id'], ['id']) - - op.drop_column('met_users', 'last_name') - op.drop_column('met_users', 'access_type') - op.drop_column('met_users', 'username') - op.drop_column('met_users', 'middle_name') - op.drop_column('met_users', 'first_name') - op.drop_column('met_users', 'contact_number') - op.drop_column('met_users', 'status_id') - op.drop_column('met_users', 'external_id') - op.drop_column('met_users', 'email_id') - op.add_column('met_users', sa.Column('email_address', sa.VARCHAR(length=500), nullable=True)) - - -def downgrade(): - op.drop_column('met_users', 'email_address') - op.add_column('met_users', sa.Column('email_id', sa.VARCHAR(length=200), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('external_id', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('status_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('contact_number', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('first_name', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('middle_name', 
sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('access_type', sa.VARCHAR(length=200), autoincrement=False, nullable=True)) - op.add_column('met_users', sa.Column('last_name', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - - op.execute('INSERT INTO met_users \ - (id, created_date, updated_date, first_name, middle_name, last_name, username, contact_number, \ - external_id, status_id, tenant_id, created_by, updated_by) SELECT id, \ - created_date, updated_date, first_name, middle_name, last_name, username, contact_number,\ - external_id, status_id, tenant_id, created_by, updated_by FROM staff_users;') - - op.create_foreign_key('user_status_fk', 'met_users', 'user_status', ['status_id'], ['id']) - op.create_unique_constraint('user_external_id_key', 'met_users', ['external_id']) - op.create_index('ix_met_users_username', 'met_users', ['username'], unique=False) - op.drop_constraint('membership_user_id_fkey', 'membership', type_='foreignkey') - op.create_foreign_key('membership_user_id_fkey', 'membership', 'met_users', ['user_id'], ['id']) - op.drop_index(op.f('ix_staff_users_username'), table_name='staff_users') - op.drop_table('staff_users') diff --git a/met-api/migrations/versions/37176ea4708d_.py b/met-api/migrations/versions/37176ea4708d_.py deleted file mode 100644 index 3ae7a8545..000000000 --- a/met-api/migrations/versions/37176ea4708d_.py +++ /dev/null @@ -1,91 +0,0 @@ -"""empty message - -Revision ID: 37176ea4708d -Revises: ec0128056a33 -Create Date: 2024-02-08 12:40:09.456210 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '37176ea4708d' -down_revision = 'ec0128056a33' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('email_verification', 'type', - existing_type=postgresql.ENUM('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), - nullable=False) - op.create_index(op.f('ix_engagement_metadata_engagement_id'), 'engagement_metadata', ['engagement_id'], unique=False) - op.create_index(op.f('ix_engagement_metadata_taxon_id'), 'engagement_metadata', ['taxon_id'], unique=False) - op.create_index(op.f('ix_engagement_metadata_value'), 'engagement_metadata', ['value'], unique=False) - op.create_index(op.f('ix_engagement_metadata_taxa_tenant_id'), 'engagement_metadata_taxa', ['tenant_id'], unique=False) - op.create_unique_constraint(None, 'engagement_metadata_taxa', ['id']) - op.execute('UPDATE membership_status_codes SET created_date = CURRENT_TIMESTAMP WHERE created_date IS NULL;') - op.alter_column('membership_status_codes', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.drop_index('ix_participant_email_address', table_name='participant') - op.alter_column('timeline_event', 'widget_id', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('timeline_event', 'status', - existing_type=postgresql.ENUM('Pending', 'InProgress', 'Completed', name='timelineeventstatus'), - nullable=False) - op.alter_column('timeline_event', 'position', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('widget_documents', 'is_uploaded', - existing_type=sa.BOOLEAN(), - nullable=True, - existing_server_default=sa.text('false')) - op.alter_column('widget_timeline', 'widget_id', - existing_type=sa.INTEGER(), - nullable=False) - op.execute('UPDATE widget_type SET created_date = CURRENT_TIMESTAMP WHERE created_date IS NULL;') - op.alter_column('widget_type', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('widget_type', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.alter_column('widget_timeline', 'widget_id', - existing_type=sa.INTEGER(), - nullable=True) - op.alter_column('widget_documents', 'is_uploaded', - existing_type=sa.BOOLEAN(), - nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('timeline_event', 'position', - existing_type=sa.INTEGER(), - nullable=True) - op.alter_column('timeline_event', 'status', - existing_type=postgresql.ENUM('Pending', 'InProgress', 'Completed', name='timelineeventstatus'), - nullable=True) - op.alter_column('timeline_event', 'widget_id', - existing_type=sa.INTEGER(), - nullable=True) - op.create_index('ix_participant_email_address', 'participant', ['email_address'], unique=False) - - op.alter_column('membership_status_codes', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - op.drop_constraint(None, 'engagement_metadata_taxa', type_='unique') - op.drop_index(op.f('ix_engagement_metadata_taxa_tenant_id'), table_name='engagement_metadata_taxa') - op.drop_index(op.f('ix_engagement_metadata_value'), table_name='engagement_metadata') - op.drop_index(op.f('ix_engagement_metadata_taxon_id'), table_name='engagement_metadata') - op.drop_index(op.f('ix_engagement_metadata_engagement_id'), table_name='engagement_metadata') - op.alter_column('email_verification', 'type', - existing_type=postgresql.ENUM('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), - nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/37176ea4708d_data_until_feb_09_2024.py b/met-api/migrations/versions/37176ea4708d_data_until_feb_09_2024.py new file mode 100644 index 000000000..e4385acd8 --- /dev/null +++ b/met-api/migrations/versions/37176ea4708d_data_until_feb_09_2024.py @@ -0,0 +1,536 @@ +"""Data until Feb 09 2024 + +Revision ID: 37176ea4708d +Revises: ec0128056a33 +Create Date: 2024-02-08 12:40:09.456210 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import table, column +from sqlalchemy import String, Integer, DateTime, Boolean, Text +from datetime import datetime +from flask import current_app + +# revision identifiers, used by Alembic. +revision = '37176ea4708d' +down_revision = 'ec0128056a33' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + # Set tenant id as 1 + tenant_id = 1 # ID of the default Tenant + + # Create an ad-hoc table for 'tenant' + tenant_table = table( + 'tenant', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('short_name', String(10)), + column('name', String(50)), + column('description', String(300)), + column('title', String(30)), + column('logo_url', String(300)), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Assume current_app.config is available and properly configured + tenant_data = [ + { + 'title': 'Modern Engagement', + 'short_name': current_app.config.get('DEFAULT_TENANT_SHORT_NAME'), + 'name': current_app.config.get('DEFAULT_TENANT_NAME'), + 'description': current_app.config.get( + 'DEFAULT_TENANT_DESCRIPTION' + ), + 'created_date': datetime.utcnow(), + } + ] + + # Perform bulk insert + op.bulk_insert(tenant_table, tenant_data) + + # Create an ad-hoc table for 'user_status' + user_status_table = table( + 'user_status', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('status_name', String(50)), + column('description', String(50)), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for bulk insert + user_status_data = [ + { + 'id': 1, + 'status_name': 'ACTIVE', + 'description': 'Active User', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'status_name': 'INACTIVE', + 'description': 'Inactive User', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform bulk insert + op.bulk_insert(user_status_table, user_status_data) + + # Create an ad-hoc table for 'staff_users' + staff_users_table = table( + 'staff_users', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('first_name', String(50)), + column('middle_name', String(50)), + column('last_name', String(50)), + column('username', String(100)), + column('email_address', String(100)), + column('contact_number', String(50)), + column('external_id', String(50)), + column('status_id', Integer), + column('tenant_id', Integer), + column('created_by', String(50)), + column('updated_by', String(50)) + ) + + # Sample data for insertion + sample_data = { + 'first_name': 'MET', + 'middle_name': '', + 'last_name': 'System', + 'external_id': '1', # Replace with actual external_id value + 'status_id': 1, + 'contact_number': '1', + 'tenant_id': tenant_id, + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + } + + # Perform insert with sample data + op.bulk_insert(staff_users_table, [sample_data]) + + # Create an ad-hoc table for 'widget_type' + widget_type_table = table( + 'widget_type', + column('id', Integer), + column('name', String), + column('description', String), + column('created_date', DateTime), + column('updated_date', DateTime), + column('created_by', String), + column('updated_by', String), + ) + + # Prepare data for bulk insert + widget_data = [ + { + 'id': 1, + 'name': 'Who is Listening', + 'description': 'Displays contact information for someone who is monitoring the engagement', + }, + { + 'id': 3, + 'name': 'Phases', + 'description': 'Displays information about the engagement phase', + }, + { + 'id': 5, + 'name': 'Events', + 'description': 'Displays event details on the engagement', + }, + { + 'id': 2, + 'name': 'Documents', + 'description': 'Displays important documents on the engagement', + 
}, + { + 'id': 4, + 'name': 'Subscribe', + 'description': 'Allows users to subscribe to an engagement', + }, + { + 'id': 6, + 'name': 'Map', + 'description': 'Display a map that shows the location of the project', + }, + { + 'id': 7, + 'name': 'Video', + 'description': 'Add a link to a hosted video and link preview', + }, + { + 'id': 8, + 'name': 'CAC Form', + 'description': 'Add a CAC Form to your project', + }, + { + 'id': 9, + 'name': 'Timeline', + 'description': 'Create a timeline for a series of events', + }, + { + 'id': 10, + 'name': 'Poll', + 'description': 'The Poll Widget enables real-time polling and feedback collection from public.', + }, + ] + + for widget in widget_data: + widget['created_date'] = datetime.utcnow() + widget['updated_date'] = datetime.utcnow() + + # Perform bulk insert in a single operation + op.bulk_insert(widget_type_table, widget_data) + + # Create an ad-hoc table for 'engagement_metadata_taxa' + engagement_metadata_taxa_table = table( + 'engagement_metadata_taxa', + column('id', Integer), + column('tenant_id', Integer), + column('name', String(64)), + column('description', String(256)), + column('freeform', Boolean), + column('data_type', String(64)), + column('default_value', Text), + column('one_per_engagement', Boolean), + column('position', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + + + # Data to be inserted + taxa_data = [ + { + 'position': 0, + 'tenant_id': tenant_id, + 'name': 'keywords', + 'description': 'Keywords for categorizing the engagement', + 'freeform': True, + 'one_per_engagement': False, + 'data_type': 'text', + }, + { + 'position': 1, + 'tenant_id': tenant_id, + 'name': 'description', + 'description': 'Description of the engagement', + 'freeform': True, + 'data_type': 'long_text', + 'one_per_engagement': True, + }, + { + 'position': 2, + 'tenant_id': tenant_id, + 'name': 'jira_ticket_url', + 'description': 'URL of the Jira ticket for this engagement', + 'freeform': True, + 'data_type': 'text', + 'one_per_engagement': True, + }, + { + 'position': 3, + 'tenant_id': tenant_id, + 'name': 'pmo_project_number', + 'description': 'PMO project number', + 'freeform': True, + 'data_type': 'text', + 'one_per_engagement': True, + }, + { + 'position': 4, + 'tenant_id': tenant_id, + 'name': 'engagement_category', + 'description': 'Category of the engagement', + 'data_type': 'text', + 'freeform': False, + 'one_per_engagement': False, + }, + { + 'position': 5, + 'tenant_id': tenant_id, + 'name': 'engagement_method', + 'description': 'Method of engagement', + 'data_type': 'text', + 'default_value': "Survey", + 'freeform': False, + 'one_per_engagement': False, + }, + { + 'position': 6, + 'tenant_id': tenant_id, + 'name': 'language', + 'description': 'Language of the engagement', + 'data_type': 'text', + 'default_value': "English", + 'freeform': False, + 'one_per_engagement': False, + }, + { + 'position': 7, + 'tenant_id': tenant_id, + 'name': 'ministry', + 'description': 'Ministry of the engagement', + 'freeform': False, + 'data_type': 'text', + 'one_per_engagement': True, + }, + ] + + for taxa in taxa_data: + taxa['created_date'] = datetime.utcnow() + taxa['updated_date'] = datetime.utcnow() + + # Perform bulk insert + op.bulk_insert(engagement_metadata_taxa_table, taxa_data) + + # Create an ad-hoc table for 'generated_document_type' + generated_document_type_table = table( + 'generated_document_type', + column('id', Integer), + 
column('created_date', DateTime), + column('updated_date', DateTime), + column('name', String(30)), + column('description', String(100)), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for bulk insert + document_type_data = [ + { + 'id': 1, + 'name': 'comment_sheet', + 'description': 'Comments export for staff', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'name': 'cac_form_sheet', + 'description': 'cac form submission export for staff', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 3, + 'name': 'proponent_comments_sheet', + 'description': 'Comments export for proponent', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform bulk insert + op.bulk_insert(generated_document_type_table, document_type_data) + + # Create an ad-hoc table for 'generated_document_template' + generated_document_template_table = table( + 'generated_document_template', + column('id', Integer), + column('type_id', Integer), + column('hash_code', String(64)), + column('extension', String(10)), + column('created_date', DateTime), + column('updated_date', DateTime), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for bulk insert + document_template_data = [ + { + 'id': 1, + 'type_id': 1, + 'hash_code': None, + 'extension': 'xlsx', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'type_id': 2, + 'hash_code': None, + 'extension': 'xlsx', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 3, + 'type_id': 3, + 'hash_code': None, + 'extension': 'xlsx', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform bulk insert + op.bulk_insert(generated_document_template_table, document_template_data) + + # Create an ad-hoc table for 'engagement_status' + engagement_status_table = table( + 'engagement_status', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('status_name', String(50)), + column('description', String(50)), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for bulk insert + engagement_status_data = [ + { + 'id': 1, + 'status_name': 'Draft', + 'description': 'Not ready to the public', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'status_name': 'Published', + 'description': 'Visible to the public', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 3, + 'status_name': 'Closed', + 'description': 'The engagement period is over', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 4, + 'status_name': 'Scheduled', + 'description': 'Scheduled to be published', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 5, + 'status_name': 'Unpublished', + 'description': 'Unpublished and hidden', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform bulk insert + op.bulk_insert(engagement_status_table, engagement_status_data) + + # Create an ad-hoc table for 'comment_status' + comment_status_table = table( + 'comment_status', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('status_name', String(50)), + column('description', String(50)), + 
column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for the initial bulk insert + initial_comment_status_data = [ + { + 'id': 1, + 'status_name': 'Pending', + 'description': 'Comment is pending review', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'status_name': 'Approved', + 'description': 'Comment is accepted for public view', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 3, + 'status_name': 'Rejected', + 'description': 'Comment is rejected and not shown', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 4, + 'status_name': 'Needs further review', + 'description': 'Comment needs further review', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform the initial bulk insert + op.bulk_insert(comment_status_table, initial_comment_status_data) + + # Create an ad-hoc table for 'membership_status_codes' + membership_status_codes_table = table( + 'membership_status_codes', + column('id', Integer), + column('created_date', DateTime), + column('updated_date', DateTime), + column('status_name', String(50)), + column('description', String(50)), + column('created_by', String(50)), + column('updated_by', String(50)), + ) + + # Data for bulk insert + membership_status_codes_data = [ + { + 'id': 1, + 'status_name': 'ACTIVE', + 'description': 'Active Membership', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 2, + 'status_name': 'INACTIVE', + 'description': 'Inactive Membership', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + { + 'id': 3, + 'status_name': 'REVOKED', + 'description': 'Revoked Membership', + 'created_date': datetime.utcnow(), + 'updated_date': datetime.utcnow(), + }, + ] + + # Perform bulk insert + op.bulk_insert(membership_status_codes_table, membership_status_codes_data) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### \ No newline at end of file diff --git a/met-api/migrations/versions/3a88de1e3e7f_add_rich_description_to_subscribe_item.py b/met-api/migrations/versions/3a88de1e3e7f_add_rich_description_to_subscribe_item.py deleted file mode 100644 index c712af2be..000000000 --- a/met-api/migrations/versions/3a88de1e3e7f_add_rich_description_to_subscribe_item.py +++ /dev/null @@ -1,37 +0,0 @@ -""" Add rich description to subscribe_item table - -Revision ID: 3a88de1e3e7f -Revises: 155c64768a99 -Create Date: 2023-09-08 09:34:06.274642 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '3a88de1e3e7f' -down_revision = '155c64768a99' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('subscribe_item', sa.Column('rich_description', sa.Text(), nullable=True)) - op.execute('UPDATE subscribe_item SET rich_description = description') - op.execute('UPDATE subscribe_item SET description = NULL') - op.alter_column('subscribe_item', 'call_to_action_type', - existing_type=sa.VARCHAR(length=25), - nullable=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - - op.alter_column('subscribe_item', 'call_to_action_type', - existing_type=sa.VARCHAR(length=25), - nullable=True) - op.drop_column('subscribe_item', 'rich_description') - - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/3e4dc76a96ab_added_the_timeline_widget.py b/met-api/migrations/versions/3e4dc76a96ab_added_the_timeline_widget.py deleted file mode 100644 index e2619da49..000000000 --- a/met-api/migrations/versions/3e4dc76a96ab_added_the_timeline_widget.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Added the Timeline Widget. - -Revision ID: 3e4dc76a96ab -Revises: 02ff8ecc6b91 -Create Date: 2023-12-05 17:04:46.304368 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from met_api.constants.timeline_event_status import TimelineEventStatus - -# revision identifiers, used by Alembic. -revision = '3e4dc76a96ab' -down_revision = '02ff8ecc6b91' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table('widget_timeline', - sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True, nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('title', sa.String(length=255), nullable=True), - sa.Column('description', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - ) - op.create_table('timeline_event', - sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True, nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('timeline_id', sa.Integer(), nullable=False), - sa.Column('status', sa.Enum(TimelineEventStatus), nullable=True), - sa.Column('position', sa.Integer(), nullable=True), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('time', sa.String(length=255), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['timeline_id'], ['widget_timeline.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - ) - -def downgrade(): - op.drop_table('widget_timeline') - op.drop_table('timeline_event') diff --git a/met-api/migrations/versions/3f11e9145a08_events_table.py b/met-api/migrations/versions/3f11e9145a08_events_table.py deleted file mode 100644 index 258eebefe..000000000 --- a/met-api/migrations/versions/3f11e9145a08_events_table.py +++ /dev/null @@ -1,78 +0,0 @@ -"""events_table - -Revision ID: 3f11e9145a08 -Revises: 2253a00e73bf -Create Date: 2023-02-12 16:48:14.120190 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '3f11e9145a08' -down_revision = '2253a00e73bf' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('widget_events', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('title', sa.String(length=50), nullable=True), - sa.Column('type', sa.Enum('OPENHOUSE', 'MEETUP', 'VIRTUAL', name='eventtypes'), nullable=False), - sa.Column('sort_index', sa.Integer(), nullable=True), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('event_item', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('description', sa.String(length=500), nullable=True), - sa.Column('location_name', sa.String(length=50), nullable=True), - sa.Column('location_address', sa.String(length=100), nullable=True, comment='The address of the location'), - sa.Column('start_date', sa.DateTime(), nullable=True), - sa.Column('end_date', sa.DateTime(), nullable=True), - sa.Column('url', sa.String(length=500), nullable=True), - sa.Column('url_label', sa.String(length=100), nullable=True, comment='Label to show for href links'), - sa.Column('sort_index', sa.Integer(), nullable=True), - sa.Column('widget_events_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['widget_events_id'], ['widget_events.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - - op.add_column('widget', sa.Column('title', sa.String(length=100), nullable=True, comment='Custom title for the widget.')) - # ### end Alembic commands ### - - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - conn = op.get_bind() - res = conn.execute( - f"select max(id) from widget_type;") - latest_id = res.fetchall()[0][0] - - op.bulk_insert(widget_type_table, [ - {'id':latest_id+1 ,'name': 'Events', 'description': 'Displays event details on the engagement'} - ]) - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - - op.drop_column('widget', 'title') - - op.drop_table('event_item') - op.drop_table('widget_events') - op.execute("delete from widget_type where name='Events'") - op.execute('DROP TYPE eventtypes;') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/4114001e1a4c_add_widget_id_to_widget_timeline_and_.py b/met-api/migrations/versions/4114001e1a4c_add_widget_id_to_widget_timeline_and_.py deleted file mode 100644 index a34d93048..000000000 --- a/met-api/migrations/versions/4114001e1a4c_add_widget_id_to_widget_timeline_and_.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Add widget_id to widget_timeline and timeline_event tables - -Revision ID: 4114001e1a4c -Revises: c09e77fde608 -Create Date: 2023-12-11 15:46:30.773046 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision = '4114001e1a4c' -down_revision = 'c09e77fde608' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('widget_timeline', sa.Column('widget_id', sa.Integer())) - op.create_foreign_key('timeline_widget_fk', 'widget_timeline', 'widget', ['widget_id'], ['id'], ondelete='CASCADE') - op.add_column('timeline_event', sa.Column('widget_id', sa.Integer())) - op.create_foreign_key('event_widget_fk', 'timeline_event', 'widget', ['widget_id'], ['id'], ondelete='CASCADE') - -def downgrade(): - op.drop_column('widget_timeline', 'widget_id') - op.drop_column('timeline_event', 'widget_id') diff --git a/met-api/migrations/versions/45f89f245e3d_engagement_metadata.py b/met-api/migrations/versions/45f89f245e3d_engagement_metadata.py deleted file mode 100644 index a9e71b9f0..000000000 --- a/met-api/migrations/versions/45f89f245e3d_engagement_metadata.py +++ /dev/null @@ -1,37 +0,0 @@ -"""engagement metadata - -Revision ID: 45f89f245e3d -Revises: f99eb7f53041 -Create Date: 2023-03-23 12:03:04.558661 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '45f89f245e3d' -down_revision = 'f99eb7f53041' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('engagement_metadata', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('project_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('engagement_id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('engagement_metadata') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/46490dc02be5_added_new_columns_to_user_table.py b/met-api/migrations/versions/46490dc02be5_added_new_columns_to_user_table.py deleted file mode 100644 index b335677a3..000000000 --- a/met-api/migrations/versions/46490dc02be5_added_new_columns_to_user_table.py +++ /dev/null @@ -1,75 +0,0 @@ -"""added new columns to user table - -Revision ID: 46490dc02be5 -Revises: 2b12a6cd987a -Create Date: 2022-12-07 05:38:57.574536 - -""" -from datetime import datetime - -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '46490dc02be5' -down_revision = '2b12a6cd987a' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - - rename_table_query = 'ALTER TABLE \"user\" rename to met_users' - op.execute(rename_table_query) - - op.create_table('user_status', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('status_name', sa.String(length=50), nullable=True), - sa.Column('description', sa.String(length=50), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('modified_by_id', sa.String(length=50), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - user_status = sa.table('user_status', - sa.column('id', sa.Integer), - sa.column('status_name', sa.String), - sa.column('description', sa.String), - sa.column('created_date', sa.DateTime), - sa.column('updated_date', sa.DateTime)) - - op.bulk_insert(user_status, [ - {'id': 1, 'status_name': 'ACTIVE', 'description': 'Active User', 'created_date': datetime.utcnow(), - 'updated_date': datetime.utcnow()}, - {'id': 2, 'status_name': 'INACTIVE', 'description': 'Inactive User', 'created_date': datetime.utcnow(), - 'updated_date': datetime.utcnow()} - ]) - - op.add_column('met_users', sa.Column('username', sa.String(length=100), nullable=True)) - op.add_column('met_users', sa.Column('access_type', sa.String(length=200), nullable=True)) - op.add_column('met_users', sa.Column('status_id', sa.Integer(), nullable=True)) - op.create_index(op.f('ix_user_username'), 'met_users', ['username'], unique=False) - op.create_foreign_key('user_status_fk', 'met_users', 'user_status', ['status_id'], ['id']) - - # update everyone to active - update_all_users_to_active = 'UPDATE met_users SET status_id = 1' - op.execute(update_all_users_to_active) - - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - update_type_query = 'ALTER TABLE met_users rename to \"user\"' - op.execute(update_type_query) - - op.drop_constraint('user_status_fk', 'user', type_='foreignkey') - op.drop_index(op.f('ix_user_username'), table_name='user') - op.drop_column('user', 'status_id') - op.drop_column('user', 'access_type') - op.drop_column('user', 'username') - op.drop_table('user_status') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/47fc88fe0477_video_widget.py b/met-api/migrations/versions/47fc88fe0477_video_widget.py deleted file mode 100644 index d31bb95f5..000000000 --- a/met-api/migrations/versions/47fc88fe0477_video_widget.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Add video widget - -Revision ID: 47fc88fe0477 -Revises: b3b5c66cea4b -Create Date: 2023-07-11 10:44:35.980432 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '47fc88fe0477' -down_revision = 'b3b5c66cea4b' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('widget_video', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('engagement_id', sa.Integer(), nullable=True), - sa.Column('video_url', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 7, 'name': 'Video', 'description': 'Add a link to a hosted video and link preview'} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('widget_video') - - conn = op.get_bind() - - conn.execute('DELETE FROM widget_type WHERE id=7') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/4c72047de4d3_add_staff_note_table.py b/met-api/migrations/versions/4c72047de4d3_add_staff_note_table.py deleted file mode 100644 index 1dd0e74d3..000000000 --- a/met-api/migrations/versions/4c72047de4d3_add_staff_note_table.py +++ /dev/null @@ -1,41 +0,0 @@ -"""add_staff_note_table - -Revision ID: 4c72047de4d3 -Revises: 0e043f976e2e -Create Date: 2023-01-10 12:23:47.566091 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '4c72047de4d3' -down_revision = '0e043f976e2e' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('staff_note', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('note', sa.Text(), nullable=True), - sa.Column('note_type', sa.String(length=50), nullable=True), - sa.Column('survey_id', sa.Integer(), nullable=False), - sa.Column('submission_id', sa.Integer(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['submission_id'], ['submission.id'], ondelete='SET NULL'), - sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('staff_note') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/4f5f91937f5c_merge_heads.py b/met-api/migrations/versions/4f5f91937f5c_merge_heads.py deleted file mode 100644 index c5d408452..000000000 --- a/met-api/migrations/versions/4f5f91937f5c_merge_heads.py +++ /dev/null @@ -1,24 +0,0 @@ -"""merge heads - -Revision ID: 4f5f91937f5c -Revises: d9777850eb98, b1196306955f -Create Date: 2023-09-19 07:33:05.815625 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '4f5f91937f5c' -down_revision = ('d9777850eb98', 'b1196306955f') -branch_labels = None -depends_on = None - - -def upgrade(): - pass - - -def downgrade(): - pass diff --git a/met-api/migrations/versions/5110026db916_create_feedback_table.py b/met-api/migrations/versions/5110026db916_create_feedback_table.py deleted file mode 100644 index 0f629a47f..000000000 --- a/met-api/migrations/versions/5110026db916_create_feedback_table.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Create feedback table - -Revision ID: 5110026db916 -Revises: ba02399c821c -Create Date: 2022-10-03 12:35:55.742573 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '5110026db916' -down_revision = 'ba02399c821c' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table('feedback', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('rating', sa.Enum('VerySatisfied', 'Satisfied', 'Neutral', 'Unsatisfied', 'VeryUnsatisfied', name='ratingtype'), nullable=False), - sa.Column('comment_type', sa.Enum('Issue', 'Idea', 'Else', name='commenttype'), nullable=True), - sa.Column('comment', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - -def downgrade(): - op.drop_table('feedback') diff --git a/met-api/migrations/versions/5423fd515e04_added_shapefile.py b/met-api/migrations/versions/5423fd515e04_added_shapefile.py deleted file mode 100644 index 5c173b8d8..000000000 --- a/met-api/migrations/versions/5423fd515e04_added_shapefile.py +++ /dev/null @@ -1,29 +0,0 @@ -"""added shapefile - -Revision ID: 5423fd515e04 -Revises: db3ffa0dd6ad -Create Date: 2023-03-21 10:32:22.728335 - -""" -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - - -# revision identifiers, used by Alembic. -revision = '5423fd515e04' -down_revision = 'db3ffa0dd6ad' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('widget_map', sa.Column('geojson', sa.Text(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('widget_map', 'geojson') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/587badc69491_email_queue.py b/met-api/migrations/versions/587badc69491_email_queue.py deleted file mode 100644 index f2419e3f6..000000000 --- a/met-api/migrations/versions/587badc69491_email_queue.py +++ /dev/null @@ -1,39 +0,0 @@ -"""email queue - -Revision ID: 587badc69491 -Revises: d2e7baa531ce -Create Date: 2023-06-05 06:17:14.373765 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '587badc69491' -down_revision = 'd2e7baa531ce' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('email_queue', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('entity_id', sa.Integer(), nullable=False), - sa.Column('entity_type', sa.String(length=100), nullable=False), - sa.Column('action', sa.String(length=100), nullable=True), - sa.Column('notification_status', sa.Enum('PROCESSING', 'SENT', name='notificationstatus'), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('email_queue') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/5880bead8f03_email_verification_type.py b/met-api/migrations/versions/5880bead8f03_email_verification_type.py deleted file mode 100644 index a362b3337..000000000 --- a/met-api/migrations/versions/5880bead8f03_email_verification_type.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Email verification type - -Revision ID: 5880bead8f03 -Revises: d86a682d7096 -Create Date: 2023-01-16 14:34:48.541883 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '5880bead8f03' -down_revision = 'd86a682d7096' -branch_labels = None -depends_on = None - - -def upgrade(): - op.execute('CREATE TYPE emailverificationtype AS ENUM (\'Survey\', \'RejectedComment\', \'Subscribe\');') - op.add_column('email_verification', sa.Column('type', sa.Enum('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), nullable=True)) - op.execute('UPDATE email_verification SET type = \'RejectedComment\' WHERE submission_id IS NOT NULL;') - op.execute('UPDATE email_verification SET type = \'Survey\' WHERE type IS NULL;') - op.alter_column('email_verification', sa.Column('type', sa.Enum('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), nullable=False)) - - - -def downgrade(): - op.drop_column('email_verification', 'type') - op.execute('DROP TYPE emailverificationtype;') diff --git a/met-api/migrations/versions/5a1258a76598_add_subscribe_to_project.py b/met-api/migrations/versions/5a1258a76598_add_subscribe_to_project.py deleted file mode 100644 index e97bed547..000000000 --- a/met-api/migrations/versions/5a1258a76598_add_subscribe_to_project.py +++ /dev/null @@ -1,31 +0,0 @@ -"""add_subscribe_to_project - -Revision ID: 5a1258a76598 -Revises: 0329d7d10a5f -Create Date: 2023-07-26 11:54:08.922003 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '5a1258a76598' -down_revision = '0329d7d10a5f' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('CREATE TYPE subscriptiontype AS ENUM (\'ENGAGEMENT\', \'PROJECT\', \'TENANT\');') - op.add_column('subscription', sa.Column('project_id', sa.String(length=50), nullable=True)) - op.add_column('subscription', sa.Column('type', sa.Enum('ENGAGEMENT', 'PROJECT', 'TENANT', name='subscriptiontype'), nullable=True)) - # ### end Alembic commands ### - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('subscription', 'type') - op.drop_column('subscription', 'project_id') - op.execute("""DROP TYPE subscriptiontype""") - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/642925969c53_added_documents_table.py b/met-api/migrations/versions/642925969c53_added_documents_table.py deleted file mode 100644 index 96899edde..000000000 --- a/met-api/migrations/versions/642925969c53_added_documents_table.py +++ /dev/null @@ -1,43 +0,0 @@ -"""added documents table - -Revision ID: 642925969c53 -Revises: e37d79be3a05 -Create Date: 2022-11-22 21:00:39.487971 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '642925969c53' -down_revision = 'e37d79be3a05' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('widget_documents', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('title', sa.String(length=50), nullable=True), - sa.Column('type', sa.String(length=50), nullable=True, comment='File or Folder identifier'), - sa.Column('parent_document_id', sa.Integer(), nullable=True), - sa.Column('url', sa.String(length=2000), nullable=True), - sa.Column('sort_index', sa.Integer(), nullable=True), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('modified_by_id', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['parent_document_id'], ['widget_documents.id'], ), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('widget_documents') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/6764af39864e_.py b/met-api/migrations/versions/6764af39864e_.py deleted file mode 100644 index 93904a7f4..000000000 --- a/met-api/migrations/versions/6764af39864e_.py +++ /dev/null @@ -1,27 +0,0 @@ -""" - -Revision ID: 6764af39864e -Revises: ffac8f5b4288 -Create Date: 2022-12-13 19:47:27.460920 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '6764af39864e' -down_revision = 'ffac8f5b4288' -branch_labels = None -depends_on = None - - -def upgrade(): - # reset users type - update_reset_access_type_query = f"update met_users set access_type = null where 0=0" - op.execute(update_reset_access_type_query) - # ### end Alembic commands ### - - -def downgrade(): - # no downgrade - print ("no downgrade") diff --git a/met-api/migrations/versions/6ce7831704cb_create_widget_and_contact_tables.py b/met-api/migrations/versions/6ce7831704cb_create_widget_and_contact_tables.py deleted file mode 100644 index 20e513158..000000000 --- a/met-api/migrations/versions/6ce7831704cb_create_widget_and_contact_tables.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Create widget, widget item, widget type and contact tables - -Revision ID: 6ce7831704cb -Revises: ac4a505ed1e3 -Create Date: 2022-10-19 17:20:12.126201 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '6ce7831704cb' -down_revision = 'ac4a505ed1e3' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('contact', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=50), nullable=True), - sa.Column('title', sa.String(length=50), nullable=True), - sa.Column('email', sa.String(length=50), nullable=True), - sa.Column('phone_number', sa.String(length=50), nullable=True), - sa.Column('address', sa.String(length=50), nullable=True), - sa.Column('bio', sa.String(length=500), nullable=True, comment='A biography or short biographical profile of someone.'), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=False), - sa.Column('updated_by', sa.String(length=50), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - widget_type_table = op.create_table('widget_type', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=50), nullable=False), - sa.Column('description', sa.String(length=200), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('name') - ) - op.create_table('widget', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('widget_type_id', sa.Integer(), nullable=True), - sa.Column('engagement_id', sa.Integer(), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=False), - sa.Column('updated_by', sa.String(length=50), nullable=False), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['widget_type_id'], ['widget_type.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('widget_type_id', 'engagement_id', name='unique_widget_type') - ) - op.create_table('widget_item', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('widget_data_id', sa.Integer(), nullable=False, comment='A dynamic foreign key that could be to any table where the widget data is hosted.'), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=False), - sa.Column('updated_by', sa.String(length=50), nullable=False), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('widget_data_id', 'widget_id', name='unique_widget_data') - ) - op.bulk_insert(widget_type_table, [ - {'id': 1, 'name': 'Who is Listening', 'description': 'Displays contact information for someone who is monitoring the engagement'} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('widget_item') - op.drop_table('widget') - op.drop_table('widget_type') - op.drop_table('contact') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/6d3c33a79c5e_add_email_verification.py b/met-api/migrations/versions/6d3c33a79c5e_add_email_verification.py deleted file mode 100644 index e75146ee6..000000000 --- a/met-api/migrations/versions/6d3c33a79c5e_add_email_verification.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Add email_verification - -Revision ID: 6d3c33a79c5e -Revises: 1113e0ad66c3 -Create Date: 2022-07-28 12:57:25.200851 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '6d3c33a79c5e' -down_revision = '1113e0ad66c3' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('email_verification', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('verification_token', sa.String(length=50), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('survey_id', sa.Integer(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('email_verification') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/779d9125c8cb_add_reviewer_type.py b/met-api/migrations/versions/779d9125c8cb_add_reviewer_type.py deleted file mode 100644 index 106420e3d..000000000 --- a/met-api/migrations/versions/779d9125c8cb_add_reviewer_type.py +++ /dev/null @@ -1,32 +0,0 @@ -"""add_reviewer_type - -Revision ID: 779d9125c8cb -Revises: 8def759e43d9 -Create Date: 2023-06-12 16:26:29.300751 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '779d9125c8cb' -down_revision = '8def759e43d9' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('membership', 'type', type_=sa.Text()) - op.execute("ALTER TYPE membershiptype ADD VALUE IF NOT EXISTS 'REVIEWER'") - op.alter_column('membership', 'type', type_=sa.Enum('TEAM_MEMBER', 'REVIEWER', name='membershiptype'), postgresql_using='type::membershiptype') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('membership', 'type', - existing_type=postgresql.ENUM('TEAM_MEMBER', name='membershiptype'), - nullable=False) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/7bf7394a517c_feedback_ratingtype.py b/met-api/migrations/versions/7bf7394a517c_feedback_ratingtype.py deleted file mode 100644 index 5062339b4..000000000 --- a/met-api/migrations/versions/7bf7394a517c_feedback_ratingtype.py +++ /dev/null @@ -1,36 +0,0 @@ -"""feedback_ratingtype - -Revision ID: 7bf7394a517c -Revises: 5423fd515e04 -Create Date: 2023-03-24 13:59:02.463795 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '7bf7394a517c' -down_revision = '5423fd515e04' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('feedback', 'rating', type_=sa.Text()) - op.execute('DROP TYPE ratingtype;') - op.execute('CREATE TYPE ratingtype AS ENUM (\'VerySatisfied\', \'Satisfied\', \'Neutral\', \'Unsatisfied\', \'VeryUnsatisfied\', \'NONE\');') - op.alter_column('feedback', 'rating', - type_=sa.Enum('NONE', 'VerySatisfied', 'Satisfied', 'Neutral', 'Unsatisfied', 'VeryUnsatisfied', name='ratingtype'), - postgresql_using='rating::ratingtype', - nullable=True) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('feedback', 'rating', - existing_type=postgresql.ENUM('VerySatisfied', 'Satisfied', 'Neutral', 'Unsatisfied', 'VeryUnsatisfied', name='ratingtype'), - nullable=False) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/7cb3da03c2a2_.py b/met-api/migrations/versions/7cb3da03c2a2_.py deleted file mode 100644 index 97955bc24..000000000 --- a/met-api/migrations/versions/7cb3da03c2a2_.py +++ /dev/null @@ -1,28 +0,0 @@ -""" - -Revision ID: 7cb3da03c2a2 -Revises: 45f89f245e3d -Create Date: 2023-03-28 09:50:24.225706 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '7cb3da03c2a2' -down_revision = '45f89f245e3d' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('widget_map', sa.Column('file_name', sa.Text(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('widget_map', 'file_name') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/7d8897c412de_tenant_model.py b/met-api/migrations/versions/7d8897c412de_tenant_model.py deleted file mode 100644 index ec1000e46..000000000 --- a/met-api/migrations/versions/7d8897c412de_tenant_model.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Tenant model - -Revision ID: 7d8897c412de -Revises: 7cb3da03c2a2 -Create Date: 2023-04-03 16:05:38.516851 - -""" -from datetime import datetime - -from alembic import op -import sqlalchemy as sa -from flask import current_app -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '7d8897c412de' -down_revision = '7cb3da03c2a2' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
###
-    tenant_table = op.create_table('tenant',
-        sa.Column('created_date', sa.DateTime(), nullable=False),
-        sa.Column('updated_date', sa.DateTime(), nullable=True),
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('short_name', sa.String(length=10), nullable=False, comment='A small code for the tenant, i.e. GDX, EAO.'),
-        sa.Column('name', sa.String(length=50), nullable=True, comment='Full name of the ministry, i.e. Env Assessment Office'),
-        sa.Column('description', sa.String(length=100), nullable=True),
-        sa.Column('created_by', sa.String(length=50), nullable=True),
-        sa.Column('updated_by', sa.String(length=50), nullable=True),
-        sa.PrimaryKeyConstraint('id')
-    )
-    default_id = "1"
-    op.bulk_insert(tenant_table, [
-        {
-            'short_name': current_app.config.get('DEFAULT_TENANT_SHORT_NAME'),
-            'name': current_app.config.get('DEFAULT_TENANT_NAME'),
-            'description': current_app.config.get('DEFAULT_TENANT_DESCRIPTION'),
-            "created_date": datetime.utcnow()}
-    ])
-
-    op.add_column('email_verification', sa.Column('tenant_id', sa.Integer(), nullable=True))
-    op.create_foreign_key('email_verification_tenant_fk', 'email_verification', 'tenant', ['tenant_id'], ['id'])
-
-    op.add_column('engagement', sa.Column('tenant_id', sa.Integer(), nullable=True, server_default=default_id))
-    op.create_foreign_key('engagement_tenant_fk', 'engagement', 'tenant', ['tenant_id'], ['id'])
-
-    op.add_column('feedback', sa.Column('tenant_id', sa.Integer(), nullable=True))
-    op.create_foreign_key('feedback_tenant_fk', 'feedback', 'tenant', ['tenant_id'], ['id'])
-
-    op.add_column('membership', sa.Column('tenant_id', sa.Integer(), nullable=True))
-    op.create_foreign_key('membership_tenant_fk', 'membership', 'tenant', ['tenant_id'], ['id'])
-
-    op.add_column('survey', sa.Column('tenant_id', sa.Integer(), nullable=True))
-    op.create_foreign_key('survey_tenant_fk', 'survey', 'tenant', ['tenant_id'], ['id'])
-
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint('survey_tenant_fk', 'survey', type_='foreignkey')
-    op.drop_column('survey', 'tenant_id')
-    op.drop_constraint('membership_tenant_fk', 'membership', type_='foreignkey')
-    op.drop_column('membership', 'tenant_id')
-    op.drop_constraint('feedback_tenant_fk', 'feedback', type_='foreignkey')
-    op.drop_column('feedback', 'tenant_id')
-    op.drop_constraint('engagement_tenant_fk', 'engagement', type_='foreignkey')
-    op.drop_column('engagement', 'tenant_id')
-    op.drop_constraint('email_verification_tenant_fk', 'email_verification', type_='foreignkey')
-    op.drop_column('email_verification', 'tenant_id')
-
-    op.drop_table('tenant')
-
-    # ### end Alembic commands ###
diff --git a/met-api/migrations/versions/7ebd9ecfccdd_.py b/met-api/migrations/versions/7ebd9ecfccdd_.py
deleted file mode 100644
index 1ab60e6e2..000000000
--- a/met-api/migrations/versions/7ebd9ecfccdd_.py
+++ /dev/null
@@ -1,55 +0,0 @@
-""" Add new document type and template for cac form submission export
-
-Revision ID: 7ebd9ecfccdd
-Revises: 3a88de1e3e7f
-Create Date: 2023-09-12 14:58:03.712784
-
-"""
-from datetime import datetime
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = '7ebd9ecfccdd'
-down_revision = '3a88de1e3e7f'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust!
### - document_type = sa.Table( - 'generated_document_type', - sa.MetaData(), - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String), - sa.Column('created_date', sa.DateTime, default=datetime.utcnow) - ) - - document_template = sa.Table( - 'generated_document_template', - sa.MetaData(), - sa.Column('id', sa.Integer), - sa.Column('type_id', sa.Integer), - sa.Column('hash_code', sa.String), - sa.Column('extension', sa.String), - sa.Column('created_date', sa.DateTime, default=datetime.utcnow) - ) - - op.bulk_insert(document_type, [ - {'id': 2, 'name': 'cac_form_sheet', 'description': 'cac form submission export for staff'} - ]) - - op.bulk_insert(document_template, [ - {'id': 2, 'type_id': 2, 'hash_code': None, "extension": "xlsx"} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('DELETE FROM document_type WHERE id = 2') - op.execute('DELETE FROM document_template WHERE id = 2') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/7faee53e6759_alter_submission_add_comment_status.py b/met-api/migrations/versions/7faee53e6759_alter_submission_add_comment_status.py deleted file mode 100644 index babfc2d87..000000000 --- a/met-api/migrations/versions/7faee53e6759_alter_submission_add_comment_status.py +++ /dev/null @@ -1,60 +0,0 @@ -"""alter submission add comment_status - -Revision ID: 7faee53e6759 -Revises: 03ee1815f6a6 -Create Date: 2022-11-08 10:31:39.240317 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '7faee53e6759' -down_revision = '03ee1815f6a6' -branch_labels = None -depends_on = None - - -def upgrade(): - conn = op.get_bind() - op.drop_constraint('comment_status_id_fkey', 'comment', type_='foreignkey') - op.add_column('submission', sa.Column('reviewed_by', sa.String(length=50), nullable=True)) - op.add_column('submission', sa.Column('review_date', sa.DateTime(), nullable=True)) - op.add_column('submission', sa.Column('comment_status_id', sa.Integer(), nullable=True)) - op.create_foreign_key('submission_comment_status_id_fkey', 'submission', 'comment_status', ['comment_status_id'], ['id'], ondelete='SET NULL') - - conn.execute('UPDATE submission s \ - SET reviewed_by=c.reviewed_by, \ - review_date=c.review_date,\ - comment_status_id=c.status_id\ - FROM comment c \ - WHERE \ - s.comment_status_id is null AND \ - s.id = c.submission_id') - - op.drop_column('comment', 'reviewed_by') - op.drop_column('comment', 'review_date') - op.drop_column('comment', 'status_id') - - -def downgrade(): - conn = op.get_bind() - op.drop_constraint('submission_comment_status_id_fkey', 'submission', type_='foreignkey') - op.add_column('comment', sa.Column('status_id', sa.Integer(), nullable=True)) - op.add_column('comment', sa.Column('review_date', sa.DateTime(), nullable=True)) - op.add_column('comment', sa.Column('reviewed_by', sa.String(length=50), nullable=True)) - op.create_foreign_key('comment_status_id_fkey', 'comment', 'comment_status', ['status_id'], ['id'], ondelete='SET NULL') - - conn.execute('UPDATE comment c \ - SET reviewed_by=s.reviewed_by, \ - review_date=s.review_date,\ - status_id=s.comment_status_id\ - FROM submission s \ - WHERE \ - c.status_id is null AND \ - c.submission_id = s.id') - - op.drop_column('submission', 'comment_status_id') - op.drop_column('submission', 'review_date') - op.drop_column('submission', 'reviewed_by') 
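The 7faee53e6759 revision above moves the review fields from comment to submission by adding the destination columns first, backfilling them with a correlated UPDATE ... FROM join, and only then dropping the source columns. The following is a minimal sketch of that pattern, reduced to a single reviewed_by column from the migration above and assuming a PostgreSQL backend; it is illustrative only and not part of the patch.

from alembic import op
import sqlalchemy as sa


def upgrade():
    # Create the destination column before touching the source data.
    op.add_column('submission', sa.Column('reviewed_by', sa.String(length=50), nullable=True))
    # Backfill with a correlated UPDATE ... FROM join (PostgreSQL syntax).
    op.execute(
        'UPDATE submission s '
        'SET reviewed_by = c.reviewed_by '
        'FROM comment c '
        'WHERE s.reviewed_by IS NULL AND s.id = c.submission_id'
    )
    # Drop the source column only after the data has been copied.
    op.drop_column('comment', 'reviewed_by')


def downgrade():
    # Reverse the move: recreate the source column, copy the data back, then drop.
    op.add_column('comment', sa.Column('reviewed_by', sa.String(length=50), nullable=True))
    op.execute(
        'UPDATE comment c '
        'SET reviewed_by = s.reviewed_by '
        'FROM submission s '
        'WHERE c.reviewed_by IS NULL AND c.submission_id = s.id'
    )
    op.drop_column('submission', 'reviewed_by')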
diff --git a/met-api/migrations/versions/8595172f9d96_add_map_type.py b/met-api/migrations/versions/8595172f9d96_add_map_type.py deleted file mode 100644 index aed843b49..000000000 --- a/met-api/migrations/versions/8595172f9d96_add_map_type.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Add the map widget - -Revision ID: 8595172f9d96 -Revises: 3f11e9145a08 -Create Date: 2023-03-03 13:42:48.413988 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = '8595172f9d96' -down_revision = '3f11e9145a08' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 6, 'name': 'Map', 'description': 'Display a map that shows the location of the project'} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - conn = op.get_bind() - - conn.execute('DELETE FROM widget_type WHERE id=6') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/88aba309bc23_add_engagement_slug.py b/met-api/migrations/versions/88aba309bc23_add_engagement_slug.py deleted file mode 100644 index 08028d4de..000000000 --- a/met-api/migrations/versions/88aba309bc23_add_engagement_slug.py +++ /dev/null @@ -1,70 +0,0 @@ -""" Add engagement slug - -Revision ID: 88aba309bc23 -Revises: 779d9125c8cb -Create Date: 2023-06-20 15:35:49.824000 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime -from met_api.services.slug_generation_service import SlugGenerationService - -# revision identifiers, used by Alembic. -revision = '88aba309bc23' -down_revision = '779d9125c8cb' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - engagement_slug_table = op.create_table('engagement_slug', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('slug', sa.String(length=200), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('engagement_id'), - sa.UniqueConstraint('slug') - ) - op.create_index('idx_slug', 'engagement_slug', ['slug'], unique=False) - - # Retrieve engagement names that don't have slugs - connection = op.get_bind() - result = connection.execute(""" - SELECT id, name - FROM engagement - """) - - engagements_without_slugs = result.fetchall() - - # Generate unique slugs for each engagement - slugify = SlugGenerationService.create_custom_unique_slugify() - slug_data = [] - for engagement in engagements_without_slugs: - engagement_id = engagement[0] - engagement_name = engagement[1] - slug = slugify(engagement_name) - slug_data.append({"engagement_id": engagement_id, "slug": slug, "created_date": datetime.utcnow()}) - - # Bulk insert the generated slugs into the engagement_slug table - if slug_data: - op.bulk_insert( - engagement_slug_table, - slug_data - ) - - op.execute("COMMIT") - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index('idx_slug', table_name='engagement_slug') - op.drop_table('engagement_slug') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/8ab640f6e60d_alter_user_table_add_external_id.py b/met-api/migrations/versions/8ab640f6e60d_alter_user_table_add_external_id.py deleted file mode 100644 index f6c6fb2d3..000000000 --- a/met-api/migrations/versions/8ab640f6e60d_alter_user_table_add_external_id.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Alter user table add external_id - -Revision ID: 8ab640f6e60d -Revises: eef9cc71cca7 -Create Date: 2022-06-16 12:32:10.933832 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '8ab640f6e60d' -down_revision = 'eef9cc71cca7' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('user', sa.Column('external_id', sa.String(length=50), nullable=True)) - op.execute('UPDATE "user" SET external_id = id WHERE external_id IS NULL') - op.alter_column('user', 'external_id', nullable=False) - op.create_unique_constraint(None, 'user', ['external_id']) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(None, 'user', type_='unique') - op.drop_column('user', 'external_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/8ca063aafc01_create_comment_table.py b/met-api/migrations/versions/8ca063aafc01_create_comment_table.py deleted file mode 100644 index f287ec2e4..000000000 --- a/met-api/migrations/versions/8ca063aafc01_create_comment_table.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Create comment table - -Revision ID: 8ca063aafc01 -Revises: 6d3c33a79c5e -Create Date: 2022-07-28 15:13:35.368696 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime - - -# revision identifiers, used by Alembic. -revision = '8ca063aafc01' -down_revision = '6d3c33a79c5e' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - comment_status_table = op.create_table('comment_status', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('status_name', sa.String(length=50), unique= True, nullable=False), - sa.Column('description', sa.String(length=50), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('comment', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('text', sa.Text(), nullable=False), - sa.Column('submission_date', sa.DateTime(), nullable=True), - sa.Column('reviewed_by', sa.String(length=50), nullable=True), - sa.Column('review_date', sa.DateTime(), nullable=True), - sa.Column('status_id', sa.Integer(), nullable=True), - sa.Column('survey_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['status_id'], ['comment_status.id'], ondelete='SET NULL'), - sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='SET NULL'), - sa.PrimaryKeyConstraint('id') - ) - op.bulk_insert(comment_status_table, [ - {'id': 1, 'status_name': 'Pending', 'description': 'Comment is pending review', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()}, - {'id': 2, 'status_name': 'Accepted', 'description': 'Comment is accepted for public view', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()}, - {'id': 3, 'status_name': 'Rejected', 'description': 'Comment is rejected and not shown', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()}, - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('comment') - op.drop_table('comment_status') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/8def759e43d9_add_participant_table.py b/met-api/migrations/versions/8def759e43d9_add_participant_table.py deleted file mode 100644 index 0edd9a209..000000000 --- a/met-api/migrations/versions/8def759e43d9_add_participant_table.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Add participant table - -Revision ID: 8def759e43d9 -Revises: 36c315ec5801 -Create Date: 2023-06-01 15:48:05.428175 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -import sqlalchemy_utils - -# revision identifiers, used by Alembic. -revision = '8def759e43d9' -down_revision = '36c315ec5801' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('participant', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.Column('email_address', sa.String(length=500), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - op.execute('INSERT INTO participant (id, email_address, created_date, updated_date, created_by, updated_by) \ - SELECT id, email_address, created_date, updated_date, created_by, updated_by FROM met_users;') - - op.create_index(op.f('ix_participant_email_address'), 'participant', ['email_address'], unique=False) - op.add_column('comment', sa.Column('participant_id', sa.Integer(), nullable=True)) - op.create_foreign_key('comment_participant_id_fkey', 'comment', 'participant', ['participant_id'], ['id'], ondelete='SET NULL') - op.drop_constraint('comment_user_id_fkey', 'comment', type_='foreignkey') - op.execute('UPDATE comment SET participant_id = user_id') - op.drop_column('comment', 'user_id') - - op.add_column('email_verification', sa.Column('participant_id', sa.Integer(), nullable=True)) - op.create_foreign_key('email_verification_participant_id_fkey', 'email_verification', 'participant', ['participant_id'], ['id']) - op.drop_constraint('email_verification_user_id_fkey', 'email_verification', type_='foreignkey') - op.execute('UPDATE email_verification SET participant_id = user_id') - op.drop_column('email_verification', 'user_id') - - op.add_column('submission', sa.Column('participant_id', sa.Integer(), nullable=True)) - op.drop_constraint('submission_user_id_fkey', 'submission', type_='foreignkey') - op.create_foreign_key('submission_participant_id_fkey', 'submission', 'participant', ['participant_id'], ['id']) - op.execute('UPDATE submission SET participant_id = user_id') - op.drop_column('submission', 'user_id') - - op.add_column('subscription', sa.Column('participant_id', sa.Integer(), nullable=True)) - op.drop_constraint('subscription_user_id_fkey', 'subscription', type_='foreignkey') - op.create_foreign_key('subscription_participant_id_fkey', 'subscription', 'participant', ['participant_id'], ['id']) - op.execute('UPDATE subscription SET participant_id = user_id') - op.drop_column('subscription', 'user_id') - - op.execute('SELECT setval(\'participant_id_seq\', (SELECT MAX(id) + 1 FROM participant), true);') - op.drop_table('met_users') - - -def downgrade(): - op.create_table('met_users', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('created_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.Column('updated_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('created_by', sa.VARCHAR(length=50), autoincrement=False, nullable=True), - sa.Column('updated_by', sa.VARCHAR(length=50), autoincrement=False, nullable=True), - sa.Column('tenant_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('email_address', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], name='met_users_tenant_fk'), - sa.PrimaryKeyConstraint('id', name='user_pkey') - ) - - op.execute('INSERT INTO met_users (id, email_address, created_date, updated_date, created_by, updated_by) \ - SELECT id, email_address, created_date, updated_date, created_by, updated_by FROM participant;') - - op.add_column('submission', 
sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.drop_constraint('submission_participant_id_fkey', 'submission', type_='foreignkey') - op.create_foreign_key('submission_user_id_fkey', 'submission', 'met_users', ['user_id'], ['id']) - op.drop_column('submission', 'participant_id') - op.add_column('email_verification', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.drop_constraint('email_verification_participant_id_fkey', 'email_verification', type_='foreignkey') - op.create_foreign_key('email_verification_user_id_fkey', 'email_verification', 'met_users', ['user_id'], ['id']) - op.drop_column('email_verification', 'participant_id') - op.add_column('comment', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.drop_constraint('comment_participant_id_fkey', 'comment', type_='foreignkey') - op.create_foreign_key('comment_user_id_fkey', 'comment', 'met_users', ['user_id'], ['id'], ondelete='SET NULL') - op.drop_column('comment', 'participant_id') - op.drop_index(op.f('ix_participant_email_address'), table_name='participant') - op.drop_table('participant') diff --git a/met-api/migrations/versions/904c1ebca3e3_merge_heads.py b/met-api/migrations/versions/904c1ebca3e3_merge_heads.py deleted file mode 100644 index f037d964b..000000000 --- a/met-api/migrations/versions/904c1ebca3e3_merge_heads.py +++ /dev/null @@ -1,24 +0,0 @@ -"""merge heads - -Revision ID: 904c1ebca3e3 -Revises: e69d7ac92afb, fda10461892d -Create Date: 2022-12-22 15:13:49.045237 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '904c1ebca3e3' -down_revision = ('e69d7ac92afb', 'fda10461892d') -branch_labels = None -depends_on = None - - -def upgrade(): - pass - - -def downgrade(): - pass diff --git a/met-api/migrations/versions/9536f547cdd5_.py b/met-api/migrations/versions/9536f547cdd5_.py deleted file mode 100644 index 4208fae8e..000000000 --- a/met-api/migrations/versions/9536f547cdd5_.py +++ /dev/null @@ -1,30 +0,0 @@ -"""empty message - -Revision ID: 9536f547cdd5 -Revises: f2b3f08c8d60 -Create Date: 2022-11-09 09:07:27.443401 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9536f547cdd5' -down_revision = 'f2b3f08c8d60' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('widget_item', sa.Column('sort_index', sa.Integer(), nullable=True, default=1)) - op.execute('UPDATE widget_item SET sort_index = 1') - op.alter_column('widget_item', 'sort_index', nullable=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('widget_item', 'sort_index') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/9714d7f8d7cc_size_increase.py b/met-api/migrations/versions/9714d7f8d7cc_size_increase.py deleted file mode 100644 index 9b6b977eb..000000000 --- a/met-api/migrations/versions/9714d7f8d7cc_size_increase.py +++ /dev/null @@ -1,30 +0,0 @@ -"""size increase - -Revision ID: 9714d7f8d7cc -Revises: 1c5883959156 -Create Date: 2023-09-06 11:43:37.142580 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision = '9714d7f8d7cc' -down_revision = '1c5883959156' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('report_setting', 'question_key', type_=sa.String(length=250)) - op.alter_column('report_setting', 'question_type', type_=sa.String(length=250)) - op.alter_column('report_setting', 'question', type_=sa.String(length=250)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - pass - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/9f86fdcfb248_add_widget_type_document.py b/met-api/migrations/versions/9f86fdcfb248_add_widget_type_document.py deleted file mode 100644 index e7ba153b6..000000000 --- a/met-api/migrations/versions/9f86fdcfb248_add_widget_type_document.py +++ /dev/null @@ -1,33 +0,0 @@ -"""add_widget_type_document - -Revision ID: 9f86fdcfb248 -Revises: 7faee53e6759 -Create Date: 2022-11-16 13:58:32.284108 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9f86fdcfb248' -down_revision = '7faee53e6759' -branch_labels = None -depends_on = None - - -def upgrade(): - widget_type_table = sa.table('widget_type', - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String)) - - op.bulk_insert(widget_type_table, [ - {'id': 2, 'name': 'Documents', 'description': 'Displays important documents on the engagement'} - ]) - - -def downgrade(): - conn = op.get_bind() - - conn.execute('DELETE FROM widget_type WHERE id=2') \ No newline at end of file diff --git a/met-api/migrations/versions/a1237c8a3df9_.py b/met-api/migrations/versions/a1237c8a3df9_.py deleted file mode 100644 index ac044912d..000000000 --- a/met-api/migrations/versions/a1237c8a3df9_.py +++ /dev/null @@ -1,43 +0,0 @@ -""" - -Revision ID: a1237c8a3df9 -Revises: 3f11e9145a08 -Create Date: 2023-03-07 12:19:32.133932 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from datetime import datetime - -# revision identifiers, used by Alembic. -revision = 'a1237c8a3df9' -down_revision = '3f11e9145a08' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('widget_map', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('description', sa.String(length=500), nullable=True), - sa.Column('longitude', sa.Float(), nullable=False), - sa.Column('latitude', sa.Float(), nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=True), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('widget_map') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/a2d20b31e275_.py b/met-api/migrations/versions/a2d20b31e275_.py deleted file mode 100644 index f0dc2d971..000000000 --- a/met-api/migrations/versions/a2d20b31e275_.py +++ /dev/null @@ -1,78 +0,0 @@ -"""empty message - -Revision ID: a2d20b31e275 -Revises: -Create Date: 2022-05-06 11:21:08.457737 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime - -# revision identifiers, used by Alembic. -revision = 'a2d20b31e275' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - user_table = op.create_table('user', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('first_name', sa.String(length=50), nullable=True), - sa.Column('middle_name', sa.String(length=50), nullable=True), - sa.Column('last_name', sa.String(length=50), nullable=True), - sa.Column('email_id', sa.String(length=50), nullable=True), - sa.Column('contact_number', sa.String(length=50), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - op.create_table('engagement', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=50), nullable=True), - sa.Column('description', sa.String(), nullable=True), - sa.Column('start_date', sa.DateTime(), nullable=True), - sa.Column('end_date', sa.DateTime(), nullable=True), - sa.Column('status_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('published_date', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - engagement_status_table = op.create_table('engagement_status', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('status_name', sa.String(length=50), nullable=True), - sa.Column('description', sa.String(length=50), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - conn = op.get_bind() - - op.bulk_insert(engagement_status_table, [ - {'id': 1, 'status_name': 'draft', 'description': 'Test Description', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()} - ]) - - conn.execute('SELECT setval(\'engagement_status_id_seq\', 1);') - - op.bulk_insert(user_table, [ - {'id': 1, 'first_name': 'A', 'middle_name': 'B', 'last_name': 'C', 'email_id': 1, 'contact_number': 1, 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()} - ]) - - conn.execute('SELECT setval(\'user_id_seq\', 1);') - - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('engagement_status') - op.drop_table('engagement') - op.drop_table('user') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/ac4a505ed1e3_add_feedback_source.py b/met-api/migrations/versions/ac4a505ed1e3_add_feedback_source.py deleted file mode 100644 index b2a8a2ef5..000000000 --- a/met-api/migrations/versions/ac4a505ed1e3_add_feedback_source.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add feedback source - -Revision ID: ac4a505ed1e3 -Revises: 224b70277ac4 -Create Date: 2022-10-05 13:36:41.944071 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'ac4a505ed1e3' -down_revision = '224b70277ac4' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - source = sa.Enum('Public', 'Internal', name='feedbacksourcetype') - source.create(op.get_bind()) - op.add_column('feedback', sa.Column('source', sa.Enum('Public', 'Internal', name='feedbacksourcetype'), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('feedback', 'source') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/b1196306955f_tracking_id_for_metadata.py b/met-api/migrations/versions/b1196306955f_tracking_id_for_metadata.py deleted file mode 100644 index 6bf2285f1..000000000 --- a/met-api/migrations/versions/b1196306955f_tracking_id_for_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -"""tracking id for metadata - -Revision ID: b1196306955f -Revises: 7ebd9ecfccdd -Create Date: 2023-09-17 22:11:54.456358 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'b1196306955f' -down_revision = '7ebd9ecfccdd' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement_metadata', sa.Column('project_tracking_id', sa.String(length=100), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('engagement_metadata', 'project_tracking_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/b3b5c66cea4b_report_setting.py b/met-api/migrations/versions/b3b5c66cea4b_report_setting.py deleted file mode 100644 index dc58967cc..000000000 --- a/met-api/migrations/versions/b3b5c66cea4b_report_setting.py +++ /dev/null @@ -1,45 +0,0 @@ -"""report_setting - -Revision ID: b3b5c66cea4b -Revises: 88aba309bc23 -Create Date: 2023-06-28 12:05:22.187795 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = 'b3b5c66cea4b' -down_revision = '88aba309bc23' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
###
-    op.create_table('report_setting',
-        sa.Column('created_date', sa.DateTime(), nullable=False),
-        sa.Column('updated_date', sa.DateTime(), nullable=True),
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('survey_id', sa.Integer(), nullable=False),
-        sa.Column('question_id', sa.String(length=250), nullable=True),
-        sa.Column('question_key', sa.String(length=100), nullable=True),
-        sa.Column('question_type', sa.String(length=100), nullable=True),
-        sa.Column('question', sa.String(length=200), nullable=True),
-        sa.Column('display', sa.Boolean(), nullable=True,
-                  comment='Flag to identify if the question needs to be displayed on the dashboard.'),
-        sa.Column('created_by', sa.String(length=50), nullable=True),
-        sa.Column('updated_by', sa.String(length=50), nullable=True),
-        sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.add_column('survey', sa.Column('generate_dashboard', sa.Boolean(), nullable=True))
-    op.execute('UPDATE survey SET generate_dashboard = True')
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('survey', 'generate_dashboard')
-    op.drop_table('report_setting')
-    # ### end Alembic commands ###
diff --git a/met-api/migrations/versions/ba02399c821c_add_closed_status.py b/met-api/migrations/versions/ba02399c821c_add_closed_status.py
deleted file mode 100644
index fd4ba1440..000000000
--- a/met-api/migrations/versions/ba02399c821c_add_closed_status.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""add closed status
-
-Revision ID: ba02399c821c
-Revises: cb965cb4a3ad
-Create Date: 2022-06-29 13:34:24.214735
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from datetime import datetime
-
-
-# revision identifiers, used by Alembic.
-revision = 'ba02399c821c'
-down_revision = 'cb965cb4a3ad'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    conn = op.get_bind()
-
-    engagement_status = sa.table('engagement_status',
-                                 sa.column('id', sa.Integer),
-                                 sa.column('status_name', sa.String),
-                                 sa.column('description', sa.String),
-                                 sa.column('created_date', sa.DateTime),
-                                 sa.column('updated_date', sa.DateTime))
-
-    op.bulk_insert(engagement_status, [
-        {'id': 3, 'status_name': 'Closed', 'description': 'The engagement period is over', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()}
-    ])
-
-    conn.execute('SELECT setval(\'engagement_status_id_seq\', 3);')
-
-
-def downgrade():
-    conn = op.get_bind()
-
-    conn.execute('DELETE FROM engagement_status WHERE id=3')
-    conn.execute('SELECT setval(\'engagement_status_id_seq\', 2);')
diff --git a/met-api/migrations/versions/bd0eb0d25caf_adding_engagement_consent_message.py b/met-api/migrations/versions/bd0eb0d25caf_adding_engagement_consent_message.py
deleted file mode 100644
index 2d2ce9577..000000000
--- a/met-api/migrations/versions/bd0eb0d25caf_adding_engagement_consent_message.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""adding_engagement_consent_message
-
-Revision ID: bd0eb0d25caf
-Revises: 4114001e1a4c
-Create Date: 2024-01-10 12:21:32.781720
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = 'bd0eb0d25caf'
-down_revision = '4114001e1a4c'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('engagement', sa.Column('consent_message', postgresql.JSON(astext_type=sa.Text()), nullable=True))
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('engagement', 'consent_message')
-    # ### end Alembic commands ###
diff --git a/met-api/migrations/versions/be3880132244_merge_heads.py b/met-api/migrations/versions/be3880132244_merge_heads.py
deleted file mode 100644
index 823bc4afd..000000000
--- a/met-api/migrations/versions/be3880132244_merge_heads.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""merge heads
-
-Revision ID: be3880132244
-Revises: 8595172f9d96, a1237c8a3df9
-Create Date: 2023-03-08 15:04:38.905600
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = 'be3880132244'
-down_revision = ('8595172f9d96', 'a1237c8a3df9')
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    pass
-
-
-def downgrade():
-    pass
diff --git a/met-api/migrations/versions/c09e77fde608_added_enum_value_for_timeline_widget.py b/met-api/migrations/versions/c09e77fde608_added_enum_value_for_timeline_widget.py
deleted file mode 100644
index 0d55ae41d..000000000
--- a/met-api/migrations/versions/c09e77fde608_added_enum_value_for_timeline_widget.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""Added enum value for Timeline Widget.
-
-Revision ID: c09e77fde608
-Revises: 3e4dc76a96ab
-Create Date: 2023-12-06 11:46:20.934373
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = 'c09e77fde608'
-down_revision = '3e4dc76a96ab'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    op.add_column('timeline_event', sa.Column('created_date', sa.DateTime(), nullable=False))
-    op.add_column('timeline_event', sa.Column('updated_date', sa.DateTime(), nullable=True))
-    op.add_column('timeline_event', sa.Column('created_by', sa.String(length=50), nullable=True))
-    op.add_column('timeline_event', sa.Column('updated_by', sa.String(length=50), nullable=True))
-
-    op.add_column('widget_timeline', sa.Column('created_date', sa.DateTime(), nullable=False))
-    op.add_column('widget_timeline', sa.Column('updated_date', sa.DateTime(), nullable=True))
-    op.add_column('widget_timeline', sa.Column('created_by', sa.String(length=50), nullable=True))
-    op.add_column('widget_timeline', sa.Column('updated_by', sa.String(length=50), nullable=True))
-
-    widget_type_table = sa.table('widget_type',
-                                 sa.Column('id', sa.Integer),
-                                 sa.Column('name', sa.String),
-                                 sa.Column('description', sa.String))
-
-    op.bulk_insert(
-        widget_type_table,
-        [
-            {
-                'id': 8,
-                'name': 'CAC Form',
-                'description': 'Add a CAC Form to your project',
-            },
-            {
-                'id': 9,
-                'name': 'Timeline',
-                'description': 'Create a timeline for a series of events',
-            },
-        ]
-    )
-
-def downgrade():
-    op.drop_column('widget_timeline', 'updated_by')
-    op.drop_column('widget_timeline', 'created_by')
-    op.drop_column('widget_timeline', 'updated_date')
-    op.drop_column('widget_timeline', 'created_date')
-    op.drop_column('timeline_event', 'updated_by')
-    op.drop_column('timeline_event', 'created_by')
-    op.drop_column('timeline_event', 'updated_date')
-    op.drop_column('timeline_event', 'created_date')
-    op.execute('DELETE FROM widget_type WHERE id=8')
-    op.execute('DELETE FROM widget_type WHERE id=9')
diff --git a/met-api/migrations/versions/c19bc1af9f2b_alter_engagement_add_scheduled.py
b/met-api/migrations/versions/c19bc1af9f2b_alter_engagement_add_scheduled.py deleted file mode 100644 index 25b01d743..000000000 --- a/met-api/migrations/versions/c19bc1af9f2b_alter_engagement_add_scheduled.py +++ /dev/null @@ -1,38 +0,0 @@ -"""alter engagement add scheduled - -Revision ID: c19bc1af9f2b -Revises: 2aa7554dde59 -Create Date: 2022-10-21 11:43:06.798995 - -""" -from datetime import datetime -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'c19bc1af9f2b' -down_revision = '2aa7554dde59' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('engagement', sa.Column('scheduled_date', sa.DateTime(), nullable=True)) - - engagement_status = sa.table('engagement_status', - sa.column('id', sa.Integer), - sa.column('status_name', sa.String), - sa.column('description', sa.String), - sa.column('created_date', sa.DateTime), - sa.column('updated_date', sa.DateTime)) - - op.bulk_insert(engagement_status, [ - {'id': 4, 'status_name': 'Scheduled', 'description': 'Scheduled to be published', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()} - ]) - - -def downgrade(): - conn = op.get_bind() - op.drop_column('engagement', 'scheduled_date') - conn.execute('DELETE FROM engagement_status WHERE id=4') diff --git a/met-api/migrations/versions/cad222167ce7_memberships_added.py b/met-api/migrations/versions/cad222167ce7_memberships_added.py deleted file mode 100644 index 174e7c020..000000000 --- a/met-api/migrations/versions/cad222167ce7_memberships_added.py +++ /dev/null @@ -1,58 +0,0 @@ -"""memberships added - -Revision ID: cad222167ce7 -Revises: 5880bead8f03 -Create Date: 2023-01-24 14:31:48.024605 - -""" -from datetime import datetime - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'cad222167ce7' -down_revision = '5880bead8f03' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - membership_status_codes = op.create_table('membership_status_codes', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('status_name', sa.String(length=50), nullable=True), - sa.Column('description', sa.String(length=50), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('membership', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('status', sa.Integer(), nullable=True), - sa.Column('engagement_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('type', sa.Enum('TEAM_MEMBER', name='membershiptype'), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['status'], ['membership_status_codes.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['met_users.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.bulk_insert(membership_status_codes, [ - {'id': 1, 'status_name': 'ACTIVE', 'description': 'Active Membership', 'created_date': datetime.utcnow(), - 'updated_date': datetime.utcnow()}, - {'id': 2, 'status_name': 'INACTIVE', 'description': 'Inactive Membership', 'created_date': datetime.utcnow(), - 'updated_date': datetime.utcnow()} - ]) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('membership') - op.drop_table('membership_status_codes') - op.execute("""DROP TYPE membershiptype""") - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/cb965cb4a3ad_update_comment_status.py b/met-api/migrations/versions/cb965cb4a3ad_update_comment_status.py deleted file mode 100644 index e19d13b12..000000000 --- a/met-api/migrations/versions/cb965cb4a3ad_update_comment_status.py +++ /dev/null @@ -1,27 +0,0 @@ -"""update comment_status status_name column - -Revision ID: cb965cb4a3ad -Revises: 8ca063aafc01 -Create Date: 2022-08-08 14:48:08.856309 - -""" -from alembic import op - - -# revision identifiers, used by Alembic. -revision = 'cb965cb4a3ad' -down_revision = '8ca063aafc01' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('UPDATE comment_status SET status_name = \'Approved\' WHERE id = 2;') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('UPDATE comment_status SET status_name = \'Accepted\' WHERE id = 2;') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/d0f92ae9ba77_alter_engagement_alter_banner_url.py b/met-api/migrations/versions/d0f92ae9ba77_alter_engagement_alter_banner_url.py deleted file mode 100644 index 6b4b7523b..000000000 --- a/met-api/migrations/versions/d0f92ae9ba77_alter_engagement_alter_banner_url.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Alter engagement alter banner_url - -Revision ID: d0f92ae9ba77 -Revises: 2545d45bb29c -Create Date: 2022-06-27 13:36:48.323235 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'd0f92ae9ba77' -down_revision = '2545d45bb29c' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('engagement', sa.Column('banner_filename', sa.String(), nullable=True)) - op.drop_column('engagement', 'banner_url') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('banner_url', sa.VARCHAR(), autoincrement=False, nullable=True)) - op.drop_column('engagement', 'banner_filename') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/d152f85734f9_subscription_models.py b/met-api/migrations/versions/d152f85734f9_subscription_models.py deleted file mode 100644 index 4716a203b..000000000 --- a/met-api/migrations/versions/d152f85734f9_subscription_models.py +++ /dev/null @@ -1,40 +0,0 @@ -"""subscription_models - -Revision ID: d152f85734f9 -Revises: 196b0abc23b6 -Create Date: 2023-05-31 12:50:57.863157 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'd152f85734f9' -down_revision = '196b0abc23b6' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('subscription', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('email_verification_id', sa.Integer(), nullable=True), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('is_subscribed', sa.Boolean(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['email_verification_id'], ['email_verification.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['met_users.id'], ), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('subscription') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/d2e7baa531ce_edit_subscription_models.py b/met-api/migrations/versions/d2e7baa531ce_edit_subscription_models.py deleted file mode 100644 index 44c8ac2bd..000000000 --- a/met-api/migrations/versions/d2e7baa531ce_edit_subscription_models.py +++ /dev/null @@ -1,32 +0,0 @@ -"""edit_subscription_models - -Revision ID: d2e7baa531ce -Revises: d152f85734f9 -Create Date: 2023-06-01 21:09:11.767623 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'd2e7baa531ce' -down_revision = 'd152f85734f9' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('subscription', sa.Column('engagement_id', sa.Integer(), nullable=True)) - op.drop_constraint('subscription_email_verification_id_fkey', 'subscription', type_='foreignkey') - op.drop_column('subscription', 'email_verification_id') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('subscription', sa.Column('email_verification_id', sa.INTEGER(), autoincrement=False, nullable=True)) - op.create_foreign_key('subscription_email_verification_id_fkey', 'subscription', 'email_verification', ['email_verification_id'], ['id']) - op.drop_column('subscription', 'engagement_id') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/d5a6d9bb804b_merge_heads.py b/met-api/migrations/versions/d5a6d9bb804b_merge_heads.py deleted file mode 100644 index b029041cf..000000000 --- a/met-api/migrations/versions/d5a6d9bb804b_merge_heads.py +++ /dev/null @@ -1,24 +0,0 @@ -"""merge heads - -Revision ID: d5a6d9bb804b -Revises: 4f5f91937f5c, d822eacf35c9 -Create Date: 2023-09-19 13:12:04.624782 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'd5a6d9bb804b' -down_revision = ('4f5f91937f5c', 'd822eacf35c9') -branch_labels = None -depends_on = None - - -def upgrade(): - pass - - -def downgrade(): - pass diff --git a/met-api/migrations/versions/d822eacf35c9_add_engagement_unpublished_status.py b/met-api/migrations/versions/d822eacf35c9_add_engagement_unpublished_status.py deleted file mode 100644 index 4c8250d32..000000000 --- a/met-api/migrations/versions/d822eacf35c9_add_engagement_unpublished_status.py +++ /dev/null @@ -1,34 +0,0 @@ -""" Add unpublished status to engagement - -Revision ID: d822eacf35c9 -Revises: 7ebd9ecfccdd -Create Date: 2023-09-15 15:34:32.854902 - -""" -from datetime import datetime -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = 'd822eacf35c9' -down_revision = '7ebd9ecfccdd' -branch_labels = None -depends_on = None - - -def upgrade(): - engagement_status = sa.table('engagement_status', - sa.column('id', sa.Integer), - sa.column('status_name', sa.String), - sa.column('description', sa.String), - sa.column('created_date', sa.DateTime), - sa.column('updated_date', sa.DateTime)) - - op.bulk_insert(engagement_status, [ - {'id': 5, 'status_name': 'Unpublished', 'description': 'Unpublished and hidden', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()} - ]) - - -def downgrade(): - conn = op.get_bind() - conn.execute('DELETE FROM engagement_status WHERE id=5') diff --git a/met-api/migrations/versions/d86a682d7096_merge_heads.py b/met-api/migrations/versions/d86a682d7096_merge_heads.py deleted file mode 100644 index b91273382..000000000 --- a/met-api/migrations/versions/d86a682d7096_merge_heads.py +++ /dev/null @@ -1,24 +0,0 @@ -"""merge heads - -Revision ID: d86a682d7096 -Revises: 242c9f0364df, 4c72047de4d3 -Create Date: 2023-01-12 19:33:57.854477 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'd86a682d7096' -down_revision = ('242c9f0364df', '4c72047de4d3') -branch_labels = None -depends_on = None - - -def upgrade(): - pass - - -def downgrade(): - pass diff --git a/met-api/migrations/versions/d9777850eb98_add_proponent_template.py b/met-api/migrations/versions/d9777850eb98_add_proponent_template.py deleted file mode 100644 index 08e9e0d23..000000000 --- a/met-api/migrations/versions/d9777850eb98_add_proponent_template.py +++ /dev/null @@ -1,56 +0,0 @@ -"""add_proponent_template - -Revision ID: d9777850eb98 -Revises: 7ebd9ecfccdd -Create Date: 2023-09-17 12:47:48.475329 - -""" -from datetime import datetime -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'd9777850eb98' -down_revision = '7ebd9ecfccdd' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - document_type = sa.Table( - 'generated_document_type', - sa.MetaData(), - sa.Column('id', sa.Integer), - sa.Column('name', sa.String), - sa.Column('description', sa.String), - sa.Column('created_date', sa.DateTime, default=datetime.utcnow) - ) - - document_template = sa.Table( - 'generated_document_template', - sa.MetaData(), - sa.Column('id', sa.Integer), - sa.Column('type_id', sa.Integer), - sa.Column('hash_code', sa.String), - sa.Column('extension', sa.String), - sa.Column('created_date', sa.DateTime, default=datetime.utcnow) - ) - - op.bulk_insert(document_type, [ - {'id': 3, 'name': 'proponent_comments_sheet', 'description': 'Comments export for proponent'} - ]) - - op.bulk_insert(document_template, [ - {'id': 3, 'type_id': 3, 'hash_code': None, "extension": "xlsx"} - ]) - op.execute('UPDATE generated_document_template SET hash_code = null where id = 1') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.execute('DELETE FROM document_type WHERE id = 3') - op.execute('DELETE FROM document_template WHERE id = 3') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/db3ffa0dd6ad_.py b/met-api/migrations/versions/db3ffa0dd6ad_.py deleted file mode 100644 index 5369da924..000000000 --- a/met-api/migrations/versions/db3ffa0dd6ad_.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Add marker label column instead of description -Revision ID: db3ffa0dd6ad -Revises: be3880132244 -Create Date: 2023-03-14 13:14:35.088089 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'db3ffa0dd6ad' -down_revision = 'be3880132244' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('widget_map', sa.Column('marker_label', sa.String(length=30), nullable=True)) - op.drop_column('widget_map', 'description') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('widget_map', sa.Column('description', sa.VARCHAR(length=500), autoincrement=False, nullable=True)) - op.drop_column('widget_map', 'marker_label') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/db737a0db061_.py b/met-api/migrations/versions/db737a0db061_.py deleted file mode 100644 index ec56787f0..000000000 --- a/met-api/migrations/versions/db737a0db061_.py +++ /dev/null @@ -1,43 +0,0 @@ -""" Fill empty widget titles -Revision ID: db737a0db061 -Revises: df73727dc6d9b7_add_sub_tbl -Create Date: 2023-08-04 14:11:01.993136 -""" - -from alembic import op - -# revision identifiers, used by Alembic. -revision = 'db737a0db061' -down_revision = 'df73727dc6d9b7_add_sub_tbl' -branch_labels = None -depends_on = None - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - - # Execute an UPDATE statement to set 'title' based on 'widget_type_id' - op.execute(""" - UPDATE widget - SET title = - CASE - WHEN widget_type_id = 1 THEN 'Who is Listening' - WHEN widget_type_id = 2 THEN 'Documents' - WHEN widget_type_id = 3 THEN 'Environmental Assessment Process' - WHEN widget_type_id = 4 THEN 'Sign Up for Updates' - WHEN widget_type_id = 5 THEN 'Events' - WHEN widget_type_id = 6 THEN 'Map' - WHEN widget_type_id = 7 THEN 'Video' - ELSE 'Default Title' -- This is for any widget_type_id not covered above - END - WHERE title IS NULL; - """) - - # ### end Alembic commands ### - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - - # Execute an UPDATE statement to set 'title' to NULL for all rows - op.execute("UPDATE widget SET title = NULL;") - - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/df73727dc6d9b7_add_sub_tabl.py b/met-api/migrations/versions/df73727dc6d9b7_add_sub_tabl.py deleted file mode 100644 index 5d3d625d3..000000000 --- a/met-api/migrations/versions/df73727dc6d9b7_add_sub_tabl.py +++ /dev/null @@ -1,55 +0,0 @@ -""" Add subscribe_item table to database - -Revision ID: df73727dc6d9b7_add_sub_tbl -Revises: 5a1258a76598 -Create Date: 2023-07-26 13:03:24.113767 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = 'df73727dc6d9b7_add_sub_tbl' -down_revision = '5a1258a76598' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('widget_subscribe', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('type', sa.Enum('EMAIL_LIST', 'SIGN_UP', name='subscribetypes'), nullable=False), - sa.Column('sort_index', sa.Integer(), nullable=True), - sa.Column('widget_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('subscribe_item', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('description', sa.String(length=500), nullable=True), - sa.Column('call_to_action_text', sa.String(length=25), nullable=True), - sa.Column('call_to_action_type', sa.String(length=25), nullable=True), - sa.Column('sort_index', sa.Integer(), nullable=True), - sa.Column('widget_subscribe_id', sa.Integer(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['widget_subscribe_id'], ['widget_subscribe.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - - op.drop_table('subscribe_item') - op.drop_table('widget_subscribe') - op.execute('DROP TYPE subscribetypes;') - # ### end Alembic commands ### \ No newline at end of file diff --git a/met-api/migrations/versions/df842dc6d0b7_.py b/met-api/migrations/versions/df842dc6d0b7_.py deleted file mode 100644 index 0fd73adbf..000000000 --- a/met-api/migrations/versions/df842dc6d0b7_.py +++ /dev/null @@ -1,27 +0,0 @@ -""" Add is_uploaded column to widget_documents table - -Revision ID: df842dc6d0b7 -Revises: 47fc88fe0477 -Create Date: 2023-07-14 13:03:24.113767 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = 'df842dc6d0b7' -down_revision = '47fc88fe0477' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('widget_documents', sa.Column('is_uploaded', sa.Boolean(), nullable=False, server_default=sa.false())) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('widget_documents', 'is_uploaded') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/e2d5d38220d9_add_revoked_membership.py b/met-api/migrations/versions/e2d5d38220d9_add_revoked_membership.py deleted file mode 100644 index 63b769715..000000000 --- a/met-api/migrations/versions/e2d5d38220d9_add_revoked_membership.py +++ /dev/null @@ -1,46 +0,0 @@ -""" Add revoked_date and revoked status for membership - -Revision ID: e2d5d38220d9 -Revises: db737a0db061 -Create Date: 2023-08-09 07:21:47.043458 - -""" -from datetime import datetime -from alembic import op -import sqlalchemy as sa -# revision identifiers, used by Alembic. -revision = 'e2d5d38220d9' -down_revision = 'db737a0db061' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('membership', sa.Column('revoked_date', sa.DateTime(), nullable=True)) - - membership_status_codes = sa.table( - 'membership_status_codes', - sa.Column('id', sa.Integer), - sa.Column('status_name', sa.String), - sa.Column('description', sa.String), - sa.Column('created_date', sa.DateTime), - sa.Column('updated_date', sa.DateTime) - ) - op.execute( - membership_status_codes.insert().values( - id=3, - status_name='REVOKED', - description='Revoked Membership', - created_date=datetime.utcnow(), - updated_date=datetime.utcnow() - ) - ) - - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('membership', 'revoked_date') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/e37d79be3a05_added_sort_for_widgets.py b/met-api/migrations/versions/e37d79be3a05_added_sort_for_widgets.py deleted file mode 100644 index 3ff1f201f..000000000 --- a/met-api/migrations/versions/e37d79be3a05_added_sort_for_widgets.py +++ /dev/null @@ -1,28 +0,0 @@ -"""added sort for widgets - -Revision ID: e37d79be3a05 -Revises: 9f86fdcfb248 -Create Date: 2022-11-20 17:25:42.697782 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'e37d79be3a05' -down_revision = '9f86fdcfb248' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### Add Sort index to widget.### - op.add_column('widget', sa.Column('sort_index', sa.Integer(), nullable=False, server_default='1')) - # ### end Alembic commands ### - - -def downgrade(): - # ### Remove the sort index column. ### - op.drop_column('widget', 'sort_index') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/e5d3bbb8d2f0_add_survey_hidden_column.py b/met-api/migrations/versions/e5d3bbb8d2f0_add_survey_hidden_column.py deleted file mode 100644 index 4cfb1cd1e..000000000 --- a/met-api/migrations/versions/e5d3bbb8d2f0_add_survey_hidden_column.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add_survey_hidden_column - -Revision ID: e5d3bbb8d2f0 -Revises: 7d8897c412de -Create Date: 2023-04-05 16:47:19.185256 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'e5d3bbb8d2f0' -down_revision = '7d8897c412de' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('survey', sa.Column('is_hidden', sa.Boolean(), nullable=True)) - op.execute('UPDATE survey SET is_hidden = false') - op.alter_column('survey', 'is_hidden', nullable=False, server_default=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('survey', 'is_hidden') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/e69d7ac92afb_alter_submission.py b/met-api/migrations/versions/e69d7ac92afb_alter_submission.py deleted file mode 100644 index 6e5e08667..000000000 --- a/met-api/migrations/versions/e69d7ac92afb_alter_submission.py +++ /dev/null @@ -1,47 +0,0 @@ -"""alter submission - -Revision ID: e69d7ac92afb -Revises: 6764af39864e -Create Date: 2022-12-20 17:08:59.040079 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'e69d7ac92afb' -down_revision = '6764af39864e' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('comment', sa.Column('created_date', sa.DateTime(), nullable=True)) - op.add_column('comment', sa.Column('updated_date', sa.DateTime(), nullable=True)) - op.add_column('comment', sa.Column('created_by', sa.String(length=50), nullable=True)) - op.add_column('comment', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.add_column('email_verification', sa.Column('submission_id', sa.Integer(), nullable=True)) - op.create_foreign_key('email_verification_submission_id_fkey', 'email_verification', 'submission', ['submission_id'], ['id']) - op.add_column('submission', sa.Column('engagement_id', sa.Integer(), nullable=True)) - op.create_foreign_key('submission_engagement_id_fkey', 'submission', 'engagement', ['engagement_id'], ['id'], ondelete='CASCADE') - - op.execute('UPDATE submission SET engagement_id = (select survey.engagement_id from survey where survey.id = submission.survey_id) WHERE engagement_id IS NULL') - op.alter_column('submission', 'engagement_id', nullable=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint('submission_engagement_id_fkey', 'submission', type_='foreignkey') - op.drop_column('submission', 'engagement_id') - op.drop_index(op.f('ix_met_users_username'), table_name='met_users') - op.create_index('ix_user_username', 'met_users', ['username'], unique=False) - op.drop_constraint('email_verification_submission_id_fkey', 'email_verification', type_='foreignkey') - op.drop_column('email_verification', 'submission_id') - op.drop_column('comment', 'updated_by') - op.drop_column('comment', 'created_by') - op.drop_column('comment', 'updated_date') - op.drop_column('comment', 'created_date') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/ec0128056a33_rework_engagement_metadata.py b/met-api/migrations/versions/ec0128056a33_rework_engagement_metadata.py deleted file mode 100644 index db404b097..000000000 --- a/met-api/migrations/versions/ec0128056a33_rework_engagement_metadata.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Rework engagement metadata - -Revision ID: ec0128056a33 -Revises: 08f69642b7ae -Create Date: 2023-12-18 18:37:08.781433 - -""" -from enum import auto -from alembic import op -from regex import F -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from flask import current_app - -from met_api.models.tenant import Tenant as TenantModel - - -# revision identifiers, used by Alembic. 
-revision = 'ec0128056a33' -down_revision = '08f69642b7ae' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table('engagement_metadata_taxa', - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False, unique=True, autoincrement=True), - sa.Column('tenant_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=64), nullable=True), - sa.Column('description', sa.String(length=256), nullable=True), - sa.Column('freeform', sa.Boolean(), nullable=False), - sa.Column('data_type', sa.String(length=64), nullable=True), - sa.Column('default_value', sa.Text(), nullable=True), - sa.Column('one_per_engagement', sa.Boolean(), nullable=True), - sa.Column('position', sa.Integer(), nullable=False), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', name='pk_engagement_metadata_taxa'), - sa.UniqueConstraint('id', name='uq_engagement_metadata_taxa_id') - ) - # remove old data from engagement_metadata table - op.execute('DELETE FROM engagement_metadata') - - # Drop the existing primary key constraint - op.drop_constraint('engagement_metadata_pkey', 'engagement_metadata', type_='primary') - - # Create a new index on engagement_metadata_taxa - op.create_index(op.f('ix_engagement_metadata_taxa_position'), 'engagement_metadata_taxa', ['position'], unique=False) - - # Add new columns to engagement_metadata - op.add_column('engagement_metadata', sa.Column('id', sa.Integer(), nullable=False)) - op.add_column('engagement_metadata', sa.Column('taxon_id', sa.Integer(), nullable=False)) - op.add_column('engagement_metadata', sa.Column('value', sa.Text(), nullable=False)) - - # Create a new primary key constraint on the 'id' column - op.create_primary_key('pk_engagement_metadata', 'engagement_metadata', ['id']) - - # Create a new sequence and set it as the default for the 'id' column - op.execute('CREATE SEQUENCE engagement_metadata_id_seq START 1') - op.execute('ALTER TABLE engagement_metadata ALTER COLUMN id SET DEFAULT nextval(\'engagement_metadata_id_seq\')') - op.execute('ALTER SEQUENCE engagement_metadata_id_seq OWNED BY engagement_metadata.id') - - # Remove the not-null constraint from 'engagement_id' - op.alter_column('engagement_metadata', 'engagement_id', existing_type=sa.INTEGER(), nullable=True) - - # Create a foreign key constraint - op.create_foreign_key('fk_engagement_meta_taxon', 'engagement_metadata', 'engagement_metadata_taxa', ['taxon_id'], ['id'], ondelete='CASCADE') - - # Remove the 'project_tracking_id' and 'project_metadata' column - op.drop_column('engagement_metadata', 'project_tracking_id') - op.drop_column('engagement_metadata', 'project_metadata') - - # add default taxa for default tenant - default_short_name = current_app.config.get('DEFAULT_TENANT_SHORT_NAME') - tenant_id = TenantModel.find_by_short_name(default_short_name).id - taxa = [ - { - 'name': 'keywords', - 'description': 'Keywords for categorizing the engagement', - 'freeform': True, - 'one_per_engagement': False, - 'data_type': 'text', - }, - { - 'name': 'description', - 'description': 'Description of the engagement', - 'freeform': True, - 'data_type': 'long_text', - }, - { - 'name': 'jira_ticket_url', - 'description': 'URL of the Jira ticket for this engagement', - 'freeform': True, - 'data_type': 
'text', - }, - { - 'name': 'pmo_project_number', - 'description': 'PMO project number', - 'freeform': True, - 'data_type': 'text', - }, - { - 'name': 'engagement_category', - 'description': 'Category of the engagement', - 'data_type': 'text', - 'one_per_engagement': False, - }, - { - 'name': 'engagement_method', - 'description': 'Method of engagement', - 'data_type': 'text', - 'default_value': "Survey", - 'one_per_engagement': False, - }, - { - 'name': 'language', - 'description': 'Language of the engagement', - 'data_type': 'text', - 'default_value': "English", - 'one_per_engagement': False, - }, - { - 'name': 'ministry', - 'description': 'Ministry of the engagement', - 'data_type': 'text', - } - ] - for index, taxon in enumerate(taxa): - op.execute( - sa.text('INSERT INTO engagement_metadata_taxa (tenant_id, name, description, freeform, data_type, default_value, one_per_engagement, position, created_date, updated_date) ' - 'VALUES (:tenant_id, :name, :description, :freeform, :data_type, :default_value, :one_per_engagement, :position, now(), now())') - .params( - tenant_id=tenant_id, - name=taxon['name'], - description=taxon['description'], - freeform=taxon.get('freeform', False), - data_type=taxon['data_type'], - default_value=taxon.get('default_value'), - one_per_engagement=taxon.get('one_per_engagement', True), - position=index + 1, - ) - ) - - # ### end Alembic commands ### - - -def downgrade(): - op.add_column('engagement_metadata', sa.Column('project_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) - op.add_column('engagement_metadata', sa.Column('project_tracking_id', sa.VARCHAR(length=100), autoincrement=False, nullable=True)) - op.alter_column('engagement_metadata', 'engagement_id', existing_type=sa.INTEGER(), nullable=False) - op.drop_constraint('fk_engagement_meta_taxon', 'engagement_metadata', type_='foreignkey') - # remove primary key constraint from engagement_metadata.id - op.drop_constraint('pk_engagement_metadata', 'engagement_metadata', type_='primary') - op.drop_column('engagement_metadata', 'value') - op.drop_column('engagement_metadata', 'taxon_id') - op.drop_column('engagement_metadata', 'id') - op.drop_index(op.f('ix_engagement_metadata_taxa_position'), table_name='engagement_metadata_taxa') - # add primary key constraint to engagement_metadata.engagement_id - op.create_primary_key('engagement_metadata_pkey', 'engagement_metadata', ['engagement_id']) - op.drop_table('engagement_metadata_taxa') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/ec0128056a33_table_until_feb_09_2024.py b/met-api/migrations/versions/ec0128056a33_table_until_feb_09_2024.py new file mode 100644 index 000000000..f682ed458 --- /dev/null +++ b/met-api/migrations/versions/ec0128056a33_table_until_feb_09_2024.py @@ -0,0 +1,709 @@ +"""Tables Until Feb 09 2024 + +Revision ID: ec0128056a33 +Revises: +Create Date: 2023-12-18 18:37:08.781433 + +""" +from enum import auto +from alembic import op +from regex import F +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +from flask import current_app + +from met_api.models.tenant import Tenant as TenantModel + + +# revision identifiers, used by Alembic. +revision = 'ec0128056a33' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('comment_status', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('status_name', sa.String(length=50), nullable=False), + sa.Column('description', sa.String(length=50), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('status_name') + ) + op.create_table('email_queue', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('entity_id', sa.Integer(), nullable=False), + sa.Column('entity_type', sa.String(length=100), nullable=False), + sa.Column('action', sa.String(length=100), nullable=True), + sa.Column('notification_status', sa.Enum('PROCESSING', 'SENT', name='notificationstatus'), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('engagement_status', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('status_name', sa.String(length=50), nullable=True), + sa.Column('description', sa.String(length=50), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('generated_document_type', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=30), nullable=False), + sa.Column('description', sa.String(length=100), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('membership_status_codes', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('status_name', sa.String(length=50), nullable=True), + sa.Column('description', sa.String(length=50), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('participant', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('email_address', sa.String(length=500), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('tenant', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('short_name', sa.String(length=10), nullable=False, comment='A small code for the 
tenant ie GDX , EAO.'), + sa.Column('name', sa.String(length=50), nullable=True, comment='Full name of the ministry.ie Env Assessment Office'), + sa.Column('description', sa.String(length=300), nullable=True), + sa.Column('title', sa.String(length=30), nullable=False), + sa.Column('logo_url', sa.String(length=300), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('user_status', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('status_name', sa.String(length=50), nullable=True), + sa.Column('description', sa.String(length=50), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_type', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=50), nullable=False), + sa.Column('description', sa.String(length=200), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name') + ) + op.create_table('contact', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=50), nullable=True), + sa.Column('title', sa.String(length=50), nullable=True), + sa.Column('email', sa.String(length=50), nullable=True), + sa.Column('phone_number', sa.String(length=50), nullable=True), + sa.Column('address', sa.String(length=150), nullable=True), + sa.Column('bio', sa.String(length=500), nullable=True, comment='A biography or short biographical profile of someone.'), + sa.Column('avatar_filename', sa.String(), nullable=True), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('engagement', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=50), nullable=True), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('rich_description', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('start_date', sa.DateTime(), nullable=True), + sa.Column('end_date', sa.DateTime(), nullable=True), + sa.Column('status_id', sa.Integer(), nullable=True), + sa.Column('published_date', sa.DateTime(), nullable=True), + sa.Column('scheduled_date', sa.DateTime(), nullable=True), + sa.Column('content', sa.Text(), nullable=False), + sa.Column('rich_content', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('banner_filename', sa.String(), nullable=True), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('is_internal', sa.Boolean(), nullable=False), + 
sa.Column('consent_message', postgresql.JSON(astext_type=sa.Text()), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['status_id'], ['engagement_status.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('engagement_metadata_taxa', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('tenant_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=64), nullable=True), + sa.Column('description', sa.String(length=256), nullable=True), + sa.Column('freeform', sa.Boolean(), nullable=False), + sa.Column('data_type', sa.String(length=64), nullable=True), + sa.Column('default_value', sa.Text(), nullable=True), + sa.Column('one_per_engagement', sa.Boolean(), nullable=True), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_index(op.f('ix_engagement_metadata_taxa_position'), 'engagement_metadata_taxa', ['position'], unique=False) + op.create_index(op.f('ix_engagement_metadata_taxa_tenant_id'), 'engagement_metadata_taxa', ['tenant_id'], unique=False) + op.create_table('feedback', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('status', sa.Enum('Unreviewed', 'Archived', name='feedbackstatustype'), nullable=False), + sa.Column('rating', sa.Enum('NONE', 'VerySatisfied', 'Satisfied', 'Neutral', 'Unsatisfied', 'VeryUnsatisfied', name='ratingtype'), nullable=True), + sa.Column('comment_type', sa.Enum('NONE', 'Issue', 'Idea', 'Else', name='commenttype'), nullable=True), + sa.Column('comment', sa.Text(), nullable=True), + sa.Column('submission_path', sa.Text(), nullable=True), + sa.Column('source', sa.Enum('Public', 'Internal', name='feedbacksourcetype'), nullable=True), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('generated_document_template', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('type_id', sa.Integer(), nullable=False), + sa.Column('hash_code', sa.String(length=64), nullable=True), + sa.Column('extension', sa.String(length=10), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['type_id'], ['generated_document_type.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('hash_code') + ) + op.create_table('staff_users', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), 
autoincrement=True, nullable=False), + sa.Column('first_name', sa.String(length=50), nullable=True), + sa.Column('middle_name', sa.String(length=50), nullable=True), + sa.Column('last_name', sa.String(length=50), nullable=True), + sa.Column('username', sa.String(length=100), nullable=True), + sa.Column('email_address', sa.String(length=100), nullable=True), + sa.Column('contact_number', sa.String(length=50), nullable=True), + sa.Column('external_id', sa.String(length=50), nullable=False), + sa.Column('status_id', sa.Integer(), nullable=False), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['status_id'], ['user_status.id'], ), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('external_id') + ) + op.create_index(op.f('ix_staff_users_username'), 'staff_users', ['username'], unique=True) + op.create_table('subscription', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('participant_id', sa.Integer(), nullable=True), + sa.Column('is_subscribed', sa.Boolean(), nullable=False), + sa.Column('project_id', sa.String(length=50), nullable=True), + sa.Column('type', sa.Enum('ENGAGEMENT', 'PROJECT', 'TENANT', name='subscriptiontype'), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['participant_id'], ['participant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('cac_form', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('understand', sa.Boolean(), nullable=False), + sa.Column('terms_of_reference', sa.Boolean(), nullable=False), + sa.Column('first_name', sa.String(length=50), nullable=False), + sa.Column('last_name', sa.String(length=50), nullable=False), + sa.Column('city', sa.String(length=50), nullable=False), + sa.Column('email', sa.String(length=50), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('engagement_metadata', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('taxon_id', sa.Integer(), nullable=False), + sa.Column('value', sa.Text(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['taxon_id'], ['engagement_metadata_taxa.id'], ondelete='CASCADE'), + 
sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_engagement_metadata_engagement_id'), 'engagement_metadata', ['engagement_id'], unique=False) + op.create_index(op.f('ix_engagement_metadata_taxon_id'), 'engagement_metadata', ['taxon_id'], unique=False) + op.create_index(op.f('ix_engagement_metadata_value'), 'engagement_metadata', ['value'], unique=False) + op.create_table('engagement_settings', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('send_report', sa.Boolean(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('engagement_id') + ) + op.create_table('engagement_slug', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('slug', sa.String(length=200), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('engagement_id'), + sa.UniqueConstraint('slug') + ) + op.create_index('idx_slug', 'engagement_slug', ['slug'], unique=False) + op.create_table('engagement_status_block', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('survey_status', sa.Enum('Upcoming', 'Open', 'Closed', name='submissionstatus'), nullable=False), + sa.Column('block_text', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('engagement_id', 'survey_status', name='unique_engagement_status_block') + ) + op.create_table('membership', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('status', sa.Integer(), nullable=True), + sa.Column('revoked_date', sa.DateTime(), nullable=True), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('type', sa.Enum('TEAM_MEMBER', 'REVIEWER', name='membershiptype'), nullable=False), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('is_latest', sa.Boolean(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['status'], ['membership_status_codes.id'], ), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + 
sa.ForeignKeyConstraint(['user_id'], ['staff_users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('survey', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=50), nullable=True), + sa.Column('form_json', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('is_hidden', sa.Boolean(), nullable=False), + sa.Column('is_template', sa.Boolean(), nullable=False), + sa.Column('generate_dashboard', sa.Boolean(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_survey_name'), 'survey', ['name'], unique=False) + op.create_table('widget', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('widget_type_id', sa.Integer(), nullable=True), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('title', sa.String(length=100), nullable=True, comment='Custom title for the widget.'), + sa.Column('sort_index', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_type_id'], ['widget_type.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('widget_type_id', 'engagement_id', name='unique_widget_type') + ) + op.create_table('report_setting', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('survey_id', sa.Integer(), nullable=False), + sa.Column('question_id', sa.Text(), nullable=True), + sa.Column('question_key', sa.Text(), nullable=True), + sa.Column('question_type', sa.Text(), nullable=True), + sa.Column('question', sa.Text(), nullable=True), + sa.Column('display', sa.Boolean(), nullable=True, comment='Flag to identify if the question needs to be displayed on the dashboard.'), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('submission', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('submission_json', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False), + sa.Column('survey_id', sa.Integer(), nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('participant_id', sa.Integer(), nullable=True), + sa.Column('reviewed_by', sa.String(length=50), nullable=True), + sa.Column('review_date', sa.DateTime(),
nullable=True), + sa.Column('comment_status_id', sa.Integer(), nullable=True), + sa.Column('has_personal_info', sa.Boolean(), nullable=True), + sa.Column('has_profanity', sa.Boolean(), nullable=True), + sa.Column('rejected_reason_other', sa.String(length=500), nullable=True), + sa.Column('has_threat', sa.Boolean(), nullable=True), + sa.Column('notify_email', sa.Boolean(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['comment_status_id'], ['comment_status.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['participant_id'], ['participant.id'], ), + sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_documents', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(length=50), nullable=True), + sa.Column('type', sa.String(length=50), nullable=True, comment='File or Folder identifier'), + sa.Column('parent_document_id', sa.Integer(), nullable=True), + sa.Column('url', sa.String(length=2000), nullable=True), + sa.Column('sort_index', sa.Integer(), nullable=True), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('is_uploaded', sa.Boolean(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['parent_document_id'], ['widget_documents.id'], ), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_events', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(length=50), nullable=True), + sa.Column('type', sa.Enum('OPENHOUSE', 'MEETUP', 'VIRTUAL', name='eventtypes'), nullable=False), + sa.Column('sort_index', sa.Integer(), nullable=True), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_item', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('widget_data_id', sa.Integer(), nullable=False, comment='A dynamic foreign key that could be to any table where the widget data is hosted.'), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('sort_index', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('widget_data_id', 'widget_id', name='unique_widget_data') + ) + op.create_table('widget_map', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), 
nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('marker_label', sa.String(length=30), nullable=True), + sa.Column('latitude', sa.Float(), nullable=False), + sa.Column('longitude', sa.Float(), nullable=False), + sa.Column('geojson', sa.Text(), nullable=True), + sa.Column('file_name', sa.Text(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_polls', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=2048), nullable=True), + sa.Column('status', sa.Enum('active', 'inactive', name='poll_status'), nullable=True), + sa.Column('widget_id', sa.Integer(), nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_subscribe', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('type', sa.Enum('EMAIL_LIST', 'SIGN_UP', name='subscribetypes'), nullable=False), + sa.Column('sort_index', sa.Integer(), nullable=True), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_timeline', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('widget_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('widget_video', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('widget_id', sa.Integer(), nullable=True), + sa.Column('engagement_id', sa.Integer(), nullable=True), + sa.Column('video_url', sa.String(length=255), nullable=False), + 
sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('comment', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('text', sa.Text(), nullable=False), + sa.Column('submission_date', sa.DateTime(), nullable=True), + sa.Column('survey_id', sa.Integer(), nullable=False), + sa.Column('participant_id', sa.Integer(), nullable=True), + sa.Column('submission_id', sa.Integer(), nullable=True), + sa.Column('component_id', sa.String(length=10), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['participant_id'], ['participant.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['submission_id'], ['submission.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('email_verification', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('verification_token', sa.String(length=50), nullable=False), + sa.Column('participant_id', sa.Integer(), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('type', sa.Enum('Survey', 'RejectedComment', 'Subscribe', name='emailverificationtype'), nullable=False), + sa.Column('survey_id', sa.Integer(), nullable=True), + sa.Column('submission_id', sa.Integer(), nullable=True), + sa.Column('tenant_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['participant_id'], ['participant.id'], ), + sa.ForeignKeyConstraint(['submission_id'], ['submission.id'], ), + sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ), + sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('event_item', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('location_name', sa.String(length=50), nullable=True), + sa.Column('location_address', sa.String(length=100), nullable=True, comment='The address of the location'), + sa.Column('start_date', sa.DateTime(), nullable=True), + sa.Column('end_date', sa.DateTime(), nullable=True), + sa.Column('url', sa.String(length=500), nullable=True), + sa.Column('url_label', sa.String(length=100), nullable=True, comment='Label to show for href links'), + sa.Column('sort_index', sa.Integer(), nullable=True), + sa.Column('widget_events_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['widget_events_id'], ['widget_events.id'], 
ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('poll_answers', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('answer_text', sa.String(length=255), nullable=False), + sa.Column('poll_id', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['poll_id'], ['widget_polls.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('staff_note', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('note', sa.Text(), nullable=True), + sa.Column('note_type', sa.String(length=50), nullable=True), + sa.Column('survey_id', sa.Integer(), nullable=False), + sa.Column('submission_id', sa.Integer(), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['submission_id'], ['submission.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['survey_id'], ['survey.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('subscribe_item', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('rich_description', sa.Text(), nullable=True), + sa.Column('call_to_action_text', sa.String(length=25), nullable=True), + sa.Column('call_to_action_type', sa.String(length=25), nullable=False), + sa.Column('sort_index', sa.Integer(), nullable=True), + sa.Column('widget_subscribe_id', sa.Integer(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['widget_subscribe_id'], ['widget_subscribe.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('timeline_event', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('engagement_id', sa.Integer(), nullable=False), + sa.Column('widget_id', sa.Integer(), nullable=False), + sa.Column('timeline_id', sa.Integer(), nullable=False), + sa.Column('status', sa.Enum('Pending', 'InProgress', 'Completed', name='timelineeventstatus'), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('time', sa.String(length=255), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['engagement_id'], ['engagement.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['timeline_id'], ['widget_timeline.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('poll_responses', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + 
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('participant_id', sa.String(length=255), nullable=False), + sa.Column('selected_answer_id', sa.Integer(), nullable=False), + sa.Column('poll_id', sa.Integer(), nullable=False), + sa.Column('widget_id', sa.Integer(), nullable=False), + sa.Column('is_deleted', sa.Boolean(), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['poll_id'], ['widget_polls.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['selected_answer_id'], ['poll_answers.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['widget_id'], ['widget.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('poll_responses') + op.drop_table('timeline_event') + op.drop_table('subscribe_item') + op.drop_table('staff_note') + op.drop_table('poll_answers') + op.drop_table('event_item') + op.drop_table('email_verification') + op.drop_table('comment') + op.drop_table('widget_video') + op.drop_table('widget_timeline') + op.drop_table('widget_subscribe') + op.drop_table('widget_polls') + op.drop_table('widget_map') + op.drop_table('widget_item') + op.drop_table('widget_events') + op.drop_table('widget_documents') + op.drop_table('submission') + op.drop_table('report_setting') + op.drop_table('widget') + op.drop_index(op.f('ix_survey_name'), table_name='survey') + op.drop_table('survey') + op.drop_table('membership') + op.drop_table('engagement_status_block') + op.drop_index('idx_slug', table_name='engagement_slug') + op.drop_table('engagement_slug') + op.drop_table('engagement_settings') + op.drop_index(op.f('ix_engagement_metadata_value'), table_name='engagement_metadata') + op.drop_index(op.f('ix_engagement_metadata_taxon_id'), table_name='engagement_metadata') + op.drop_index(op.f('ix_engagement_metadata_engagement_id'), table_name='engagement_metadata') + op.drop_table('engagement_metadata') + op.drop_table('cac_form') + op.drop_table('subscription') + op.drop_index(op.f('ix_staff_users_username'), table_name='staff_users') + op.drop_table('staff_users') + op.drop_table('generated_document_template') + op.drop_table('feedback') + op.drop_index(op.f('ix_engagement_metadata_taxa_tenant_id'), table_name='engagement_metadata_taxa') + op.drop_index(op.f('ix_engagement_metadata_taxa_position'), table_name='engagement_metadata_taxa') + op.drop_table('engagement_metadata_taxa') + op.drop_table('engagement') + op.drop_table('contact') + op.drop_table('widget_type') + op.drop_table('user_status') + op.drop_table('tenant') + op.drop_table('participant') + op.drop_table('membership_status_codes') + op.drop_table('generated_document_type') + op.drop_table('engagement_status') + op.drop_table('email_queue') + op.drop_table('comment_status') + # ### end Alembic commands ### \ No newline at end of file diff --git a/met-api/migrations/versions/ec504565fab3_update_status_and_user.py b/met-api/migrations/versions/ec504565fab3_update_status_and_user.py deleted file mode 100644 index 853d40264..000000000 --- a/met-api/migrations/versions/ec504565fab3_update_status_and_user.py +++ /dev/null @@ -1,53 +0,0 @@ -"""update status and user - -Revision ID: ec504565fab3 -Revises: d0f92ae9ba77 -Create Date: 2022-06-29 13:34:24.214735 - -""" -from alembic import op -import sqlalchemy as sa -from datetime import datetime - - -# 
revision identifiers, used by Alembic. -revision = 'ec504565fab3' -down_revision = 'd0f92ae9ba77' -branch_labels = None -depends_on = None - - -def upgrade(): - conn = op.get_bind() - - engagement_status = sa.table('engagement_status', - sa.column('id', sa.Integer), - sa.column('status_name', sa.String), - sa.column('description', sa.String), - sa.column('created_date', sa.DateTime), - sa.column('updated_date', sa.DateTime)) - - conn.execute('UPDATE "user" SET first_name=\'MET\', middle_name=\'\', last_name=\'System\' WHERE id=1') - - conn.execute( - engagement_status.update() - .where(engagement_status.c.id==1) - .values({'status_name': 'Draft', 'description': 'Not ready to the public'})) - - op.bulk_insert(engagement_status, [ - {'id': 2, 'status_name': 'Published', 'description': 'Visible to the public', 'created_date': datetime.utcnow(), 'updated_date': datetime.utcnow()} - ]) - - conn.execute('SELECT setval(\'engagement_status_id_seq\', 2);') - # ### end Alembic commands ### - - -def downgrade(): - conn = op.get_bind() - - conn.execute('UPDATE engagement_status SET status_name=\'draft\', description=\'Test Description\' WHERE id=1') - conn.execute('UPDATE "user" SET first_name=\'A\', middle_name=\'B\', last_name=\'C\' WHERE id=1') - conn.execute('DELETE FROM engagement_status WHERE id=2') - - conn.execute('SELECT setval(\'engagement_status_id_seq\', 1);') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/eef9cc71cca7_.py b/met-api/migrations/versions/eef9cc71cca7_.py deleted file mode 100644 index 666ece2a8..000000000 --- a/met-api/migrations/versions/eef9cc71cca7_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""empty message - -Revision ID: eef9cc71cca7 -Revises: 326419c08f59 -Create Date: 2022-05-26 09:03:34.300400 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'eef9cc71cca7' -down_revision = '326419c08f59' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('engagement', sa.Column('rich_description', postgresql.JSON(astext_type=sa.Text()), nullable=False)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('engagement', 'rich_description') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/f037908194df_kc_attribute_script.py b/met-api/migrations/versions/f037908194df_kc_attribute_script.py deleted file mode 100644 index 656c02ca3..000000000 --- a/met-api/migrations/versions/f037908194df_kc_attribute_script.py +++ /dev/null @@ -1,39 +0,0 @@ -"""kc attribute script - -Revision ID: f037908194df -Revises: 04e6c48187da -Create Date: 2023-08-27 22:50:02.047232 - -""" -from typing import List - -from alembic import op -import sqlalchemy as sa - -from met_api.models import StaffUser -from met_api.services.participant_service import KEYCLOAK_SERVICE - -# revision identifiers, used by Alembic. 
-revision = 'f037908194df' -down_revision = '04e6c48187da' -branch_labels = None -depends_on = None - - -def upgrade(): - default_tenant_id = 1 - conn = op.get_bind() - - user_res = conn.execute("SELECT * FROM staff_users WHERE external_id IS NOT NULL;") - user_list: List[StaffUser] = user_res.fetchall() - for user in user_list: - try: - print(f'Processing profile for {user.first_name} {user.last_name}',) - KEYCLOAK_SERVICE.add_attribute_to_user(user_id=user.external_id, attribute_value=default_tenant_id) - except Exception as exc: - print('Profile Error for', user.first_name) - print(exc) - - -def downgrade(): - pass diff --git a/met-api/migrations/versions/f2b3f08c8d60_alter_comment_add_submission_id.py b/met-api/migrations/versions/f2b3f08c8d60_alter_comment_add_submission_id.py deleted file mode 100644 index a0764c9c0..000000000 --- a/met-api/migrations/versions/f2b3f08c8d60_alter_comment_add_submission_id.py +++ /dev/null @@ -1,51 +0,0 @@ -"""alter comment add submission_id - -Revision ID: f2b3f08c8d60 -Revises: c19bc1af9f2b -Create Date: 2022-11-01 11:59:02.563717 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'f2b3f08c8d60' -down_revision = 'c19bc1af9f2b' -branch_labels = None -depends_on = None - - -def upgrade(): - conn = op.get_bind() - op.add_column('comment', sa.Column('submission_id', sa.Integer(), nullable=True)) - op.add_column('comment', sa.Column('component_id', sa.String(), nullable=True)) - op.create_foreign_key('comment_submission_id_fkey', 'comment', 'submission', ['submission_id'], ['id'], ondelete='SET NULL') - # Attempt to populate the submission id for previous answers BUT - # this could potentially link a comment with a wrong submission id - # when there are multiple submissions for the same survey from the same user (we are unable to identify different submissions for the same user) - conn.execute('UPDATE comment c \ - SET submission_id=s.id \ - FROM submission s \ - WHERE \ - c.submission_id is null AND \ - s.survey_id = c.survey_id AND \ - s.user_id = c.user_id') - - # Attempt to populate the corresponding question/componnent id for a comment BUT - # this could potentially link a comment with its a wrong question/component - # when there are multiple questions in the survey (we are unable to identify which question it belongs to) - conn.execute('UPDATE comment c \ - SET component_id=item_object->>\'key\' \ - FROM survey s, \ - jsonb_array_elements(s.form_json->\'components\') with ordinality arr(item_object, position) \ - WHERE \ - c.component_id is null AND \ - c.survey_id = s.id AND \ - item_object->>\'inputType\' = \'text\'') - - -def downgrade(): - op.drop_constraint('comment_submission_id_fkey', 'comment', type_='foreignkey') - op.drop_column('comment', 'submission_id') - op.drop_column('comment', 'component_id') diff --git a/met-api/migrations/versions/f40da1b8f3e0_initialize_user_status.py b/met-api/migrations/versions/f40da1b8f3e0_initialize_user_status.py deleted file mode 100644 index b4a610f77..000000000 --- a/met-api/migrations/versions/f40da1b8f3e0_initialize_user_status.py +++ /dev/null @@ -1,28 +0,0 @@ -""" Initialize user status -P -Revision ID: f40da1b8f3e0 -Revises: 31041fb90d53 -Create Date: 2023-08-18 09:50:27.567044 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = 'f40da1b8f3e0' -down_revision = '31041fb90d53' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.execute("UPDATE staff_users SET status_id = 1") - op.alter_column('staff_users', 'status_id', nullable=False, server_default='1') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('staff_users', 'status_id', nullable=True, server_default=None) - op.execute("UPDATE staff_users SET status_id = NULL") - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/f6f480b5b664_add_survey_template_column.py b/met-api/migrations/versions/f6f480b5b664_add_survey_template_column.py deleted file mode 100644 index 1816e50e9..000000000 --- a/met-api/migrations/versions/f6f480b5b664_add_survey_template_column.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add_survey_template_column - -Revision ID: f6f480b5b664 -Revises: e5d3bbb8d2f0 -Create Date: 2023-04-10 15:33:38.529726 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'f6f480b5b664' -down_revision = 'e5d3bbb8d2f0' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('survey', sa.Column('is_template', sa.Boolean(), nullable=True)) - op.execute('UPDATE survey SET is_template = false') - op.alter_column('survey', 'is_template', nullable=False, server_default=False) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('survey', 'is_template') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/f99eb7f53041_.py b/met-api/migrations/versions/f99eb7f53041_.py deleted file mode 100644 index 0fc66957a..000000000 --- a/met-api/migrations/versions/f99eb7f53041_.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Increase submission.rejected_reason_other length - -Revision ID: f99eb7f53041 -Revises: 7bf7394a517c -Create Date: 2023-03-27 11:15:52.342030 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'f99eb7f53041' -down_revision = '7bf7394a517c' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('submission', 'rejected_reason_other', - existing_type=sa.VARCHAR(length=50), - type_=sa.VARCHAR(length=500), - nullable=True) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('submission', 'rejected_reason_other', - existing_type=sa.VARCHAR(length=500), - type_=sa.VARCHAR(length=50), - nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/fc570c0faace_survey_migration.py b/met-api/migrations/versions/fc570c0faace_survey_migration.py deleted file mode 100644 index 00d97d85d..000000000 --- a/met-api/migrations/versions/fc570c0faace_survey_migration.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Survey Migration - -Revision ID: fc570c0faace -Revises: d0f92ae9ba77 -Create Date: 2022-07-05 12:21:37.175093 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'fc570c0faace' -down_revision = 'ec504565fab3' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('survey', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=50), nullable=True, index=True), - sa.Column('form_json', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False), - sa.Column('engagement_id', sa.Integer(), nullable=True), - sa.Column('created_date', sa.DateTime(), nullable=True), - sa.Column('updated_date', sa.DateTime(), nullable=True), - sa.Column('created_by', sa.String(length=50), nullable=True), - sa.Column('updated_by', sa.String(length=50), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - - op.create_foreign_key('fk_survey_engagement', 'survey', 'engagement', ['engagement_id'], ['id'], ondelete='CASCADE') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint('fk_survey_engagement', 'survey', type_='foreignkey') - op.drop_table('survey') - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/fda10461892d_basemodel_column_change.py b/met-api/migrations/versions/fda10461892d_basemodel_column_change.py deleted file mode 100644 index a2168ab77..000000000 --- a/met-api/migrations/versions/fda10461892d_basemodel_column_change.py +++ /dev/null @@ -1,52 +0,0 @@ -"""basemodel column change - -Revision ID: fda10461892d -Revises: 0d863f773838 -Create Date: 2022-12-22 05:21:58.579344 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = 'fda10461892d' -down_revision = '0d863f773838' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('engagement', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=False) - op.alter_column('engagement', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.alter_column('engagement', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=True) - op.add_column('user_status', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.drop_column('user_status', 'modified_by_id') - op.add_column('widget_documents', sa.Column('updated_by', sa.String(length=50), nullable=True)) - op.drop_column('widget_documents', 'modified_by_id') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('widget_documents', sa.Column('modified_by_id', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.drop_column('widget_documents', 'updated_by') - op.add_column('user_status', sa.Column('modified_by_id', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.drop_column('user_status', 'updated_by') - op.alter_column('engagement', 'updated_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('engagement', 'created_by', - existing_type=sa.VARCHAR(length=50), - nullable=False) - op.alter_column('engagement', 'created_date', - existing_type=postgresql.TIMESTAMP(), - nullable=True) - # ### end Alembic commands ### diff --git a/met-api/migrations/versions/ffac8f5b4288_add_reject_reason.py b/met-api/migrations/versions/ffac8f5b4288_add_reject_reason.py deleted file mode 100644 index 9b7538911..000000000 --- a/met-api/migrations/versions/ffac8f5b4288_add_reject_reason.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add reject reason - -Revision ID: ffac8f5b4288 -Revises: 46490dc02be5 -Create Date: 2022-12-07 12:11:33.172817 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'ffac8f5b4288' -down_revision = '46490dc02be5' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column('submission', sa.Column('has_personal_info', sa.Boolean(), nullable=True)) - op.add_column('submission', sa.Column('has_profanity', sa.Boolean(), nullable=True)) - op.add_column('submission', sa.Column('rejected_reason_other', sa.String(length=50), nullable=True)) - op.add_column('submission', sa.Column('has_threat', sa.Boolean(), nullable=True)) - - -def downgrade(): - op.drop_column('submission', 'has_threat') - op.drop_column('submission', 'rejected_reason_other') - op.drop_column('submission', 'has_profanity') - op.drop_column('submission', 'has_personal_info') diff --git a/met-api/tests/utilities/factory_scenarios.py b/met-api/tests/utilities/factory_scenarios.py index 8e3acf1b3..477363623 100644 --- a/met-api/tests/utilities/factory_scenarios.py +++ b/met-api/tests/utilities/factory_scenarios.py @@ -619,7 +619,7 @@ class TestCommentInfo(dict, Enum): comment1 = { 'text': fake.paragraph(nb_sentences=3), - 'component_id': 'simpletextarea', + 'component_id': 'simpletext', 'submission_date': datetime.now().strftime('%Y-%m-%d'), } From ca06c7678161cc3ff64a17788f431bbe3305983f Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Thu, 15 Feb 2024 15:06:52 -0800 Subject: [PATCH 06/42] DESENG-473: Restore role assignment functionality (#2385) * Backend changes to leverage the CSS API for composite role management (#2382) --- CHANGELOG.MD | 4 + met-api/sample.env | 8 + met-api/src/met_api/config.py | 8 +- .../met_api/resources/engagement_members.py | 23 +- met-api/src/met_api/resources/staff_user.py | 38 +++ met-api/src/met_api/services/keycloak.py | 255 +++++++----------- .../met_api/services/membership_service.py | 143 +++++----- .../services/staff_user_membership_service.py | 23 +- .../met_api/services/staff_user_service.py | 81 +++--- met-api/src/met_api/utils/constants.py | 32 +-- met-api/tests/unit/services/test_keycloak.py | 34 +-- met-web/src/apiManager/endpoints/index.ts | 4 +- .../admin/reviewListing/Submissions.tsx | 4 +- .../comments/admin/textListing/index.tsx | 4 +- .../listing/ActionsDropDown.tsx | 10 +- .../userManagement/listing/AddUserModal.tsx | 2 +- .../listing/AssignRoleModal.tsx | 50 ++-- 
 .../listing/ReassignRoleModal.tsx             |  30 +--
 .../listing/UserManagementListing.tsx         |   4 +-
 .../userDetails/AddToEngagement.tsx           |  52 ++--
 .../userDetails/UserDetails.tsx               |   6 +-
 .../userDetails/UserStatusButton.tsx          |   4 +-
 met-web/src/models/user.ts                    |  12 +-
 met-web/src/routes/AuthGate.tsx               |   4 +-
 .../src/services/userService/api/index.tsx    |  24 +-
 met-web/src/services/userService/types.ts     |   2 +-
 .../engagement/EngagementFormUserTab.test.tsx |   4 +-
 27 files changed, 437 insertions(+), 428 deletions(-)

diff --git a/CHANGELOG.MD b/CHANGELOG.MD
index 7aefb3db2..98813ea77 100644
--- a/CHANGELOG.MD
+++ b/CHANGELOG.MD
@@ -1,3 +1,7 @@
+## February 15, 2024
+- **Task** Restore role assignment functionality to MET with the CSS API [DESENG-473](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-473)
+  - Utilize the CSS API for efficient management of composite roles. This involves the assignment, reassignment, or removal of users from the composite roles of TEAM_MEMBER, REVIEWER, IT_ADMIN, or IT_VIEWER.
+
 ## February 09, 2024
 - **Task**Consolidate and re-write old migration files [DESENG-452](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-452)
   - Deleted old migration files
diff --git a/met-api/sample.env b/met-api/sample.env
index 2fbe53638..a93566cb2 100644
--- a/met-api/sample.env
+++ b/met-api/sample.env
@@ -35,6 +35,14 @@ MET_ADMIN_CLIENT_ID="" # resource
 MET_ADMIN_CLIENT_SECRET="" # credentials.secret
 KEYCLOAK_CONNECT_TIMEOUT="60"
 
+KEYCLOAK_ADMIN_TOKEN_URL="" # URL to obtain the admin token from Keycloak
+KEYCLOAK_ADMIN_CLIENT_ID="" # Admin Client ID for Keycloak authentication
+KEYCLOAK_ADMIN_CLIENT_SECRET="" # Admin Client Secret for Keycloak authentication
+
+CSS_API_URL="" # CSS API URL
+CSS_API_ENVIRONMENT="" # CSS API environment
+CSS_API_INTEGRATION_ID= # CSS API integration number
+
 # JWT OIDC configuration for authentication
 # Populate from 'GDX MET web (public)-installation-*.json'
 JWT_OIDC_AUDIENCE="" # resource
diff --git a/met-api/src/met_api/config.py b/met-api/src/met_api/config.py
index 630c86e27..c15bd8f3b 100644
--- a/met-api/src/met_api/config.py
+++ b/met-api/src/met_api/config.py
@@ -185,8 +185,12 @@ def SQLALCHEMY_DATABASE_URI(self) -> str:
         'REALMNAME': os.getenv('KEYCLOAK_REALMNAME', 'standard'),
         'SERVICE_ACCOUNT_ID': os.getenv('MET_ADMIN_CLIENT_ID'),
         'SERVICE_ACCOUNT_SECRET': os.getenv('MET_ADMIN_CLIENT_SECRET'),
-        'ADMIN_USERNAME': os.getenv('MET_ADMIN_CLIENT_ID'),
-        'ADMIN_SECRET': os.getenv('MET_ADMIN_CLIENT_SECRET'),
+        'ADMIN_BASE_URL': os.getenv('KEYCLOAK_ADMIN_TOKEN_URL', ''),
+        'ADMIN_USERNAME': os.getenv('KEYCLOAK_ADMIN_CLIENT_ID'),
+        'ADMIN_SECRET': os.getenv('KEYCLOAK_ADMIN_CLIENT_SECRET'),
+        'CSS_API_URL': os.getenv('CSS_API_URL', ''),
+        'CSS_API_ENVIRONMENT': os.getenv('CSS_API_ENVIRONMENT', ''),
+        'CSS_API_INTEGRATION_ID': os.getenv('CSS_API_INTEGRATION_ID'),
         'CONNECT_TIMEOUT': int(os.getenv('KEYCLOAK_CONNECT_TIMEOUT', '60')),
     }
 
diff --git a/met-api/src/met_api/resources/engagement_members.py b/met-api/src/met_api/resources/engagement_members.py
index 4939c76fc..c827568a8 100644
--- a/met-api/src/met_api/resources/engagement_members.py
+++ b/met-api/src/met_api/resources/engagement_members.py
@@ -48,18 +48,17 @@ def get(engagement_id):
         except BusinessException as err:
             return {'message': err.error}, err.status_code
 
-    # TODO: Create membership method that uses composite roles
-    # @staticmethod
-    # @cross_origin(origins=allowedorigins())
-    # @_jwt.requires_auth
-    # def post(engagement_id):
-    #     """Create a new membership."""
-    #     # TODO validate against a schema. 
-    #     try:
-    #         member = MembershipService.create_membership(engagement_id, request.get_json())
-    #         return MembershipSchema().dump(member), HTTPStatus.OK
-    #     except BusinessException as err:
-    #         return {'message': err.error}, err.status_code
+    @staticmethod
+    @cross_origin(origins=allowedorigins())
+    @_jwt.requires_auth
+    def post(engagement_id):
+        """Create a new membership."""
+        # TODO validate against a schema.
+        try:
+            member = MembershipService.create_membership(engagement_id, request.get_json())
+            return MembershipSchema().dump(member), HTTPStatus.OK
+        except BusinessException as err:
+            return {'message': err.error}, err.status_code
 
 
 @cors_preflight('GET,OPTIONS')
diff --git a/met-api/src/met_api/resources/staff_user.py b/met-api/src/met_api/resources/staff_user.py
index bacaeafbf..6a670f76b 100644
--- a/met-api/src/met_api/resources/staff_user.py
+++ b/met-api/src/met_api/resources/staff_user.py
@@ -121,6 +121,44 @@ def patch(user_id):
             return str(err), HTTPStatus.BAD_REQUEST
 
 
+@cors_preflight('POST, PUT')
+@API.route('/<user_id>/roles')
+class UserRoles(Resource):
+    """Add user to composite roles."""
+
+    @staticmethod
+    @cross_origin(origins=allowedorigins())
+    @require_role([Role.CREATE_ADMIN_USER.value], skip_tenant_check_for_admin=True)
+    def post(user_id):
+        """Add user to composite roles."""
+        try:
+            args = request.args
+            user_schema = StaffUserService().assign_composite_role_to_user(user_id, args.get('role'))
+            return user_schema, HTTPStatus.OK
+        except KeyError as err:
+            return str(err), HTTPStatus.INTERNAL_SERVER_ERROR
+        except ValueError as err:
+            return str(err), HTTPStatus.INTERNAL_SERVER_ERROR
+        except BusinessException as err:
+            return {'message': err.error}, err.status_code
+
+    @staticmethod
+    @cross_origin(origins=allowedorigins())
+    @require_role([Role.UPDATE_USER_GROUP.value])
+    def put(user_id):
+        """Update user composite roles."""
+        try:
+            args = request.args
+            user_schema = StaffUserMembershipService().reassign_user(user_id, args.get('role'))
+            return user_schema, HTTPStatus.OK
+        except KeyError as err:
+            return str(err), HTTPStatus.INTERNAL_SERVER_ERROR
+        except ValueError as err:
+            return str(err), HTTPStatus.INTERNAL_SERVER_ERROR
+        except BusinessException as err:
+            return {'message': err.error}, err.status_code
+
+
 @cors_preflight('GET,OPTIONS')
 @API.route('/<user_id>/engagements')
 class EngagementMemberships(Resource):
diff --git a/met-api/src/met_api/services/keycloak.py b/met-api/src/met_api/services/keycloak.py
index 7ab9e0305..6f6a474bb 100644
--- a/met-api/src/met_api/services/keycloak.py
+++ b/met-api/src/met_api/services/keycloak.py
@@ -14,65 +14,67 @@
 """Utils for keycloak administration."""
 
 import json
+from typing import List
 
 import requests
-from flask import current_app
 
-from met_api.utils.enums import ContentType
+from met_api.config import Config
+from met_api.utils.enums import ContentType, KeycloakCompositeRoleNames
 
 
 class KeycloakService:  # pylint: disable=too-few-public-methods
     """Keycloak services."""
 
-    # @staticmethod
-    # def get_user_groups(user_id):
-    #     """Get user group from Keycloak by userid."""
-    #     keycloak = current_app.config['KEYCLOAK_CONFIG']
-    #     timeout = keycloak['CONNECT_TIMEOUT']
-    #     base_url = keycloak['BASE_URL']
-    #     realm = keycloak['REALMNAME']
-    #     admin_token = KeycloakService._get_admin_token()
-    #     headers = {
-    #         'Content-Type': ContentType.JSON.value,
-    #         'Authorization': f'Bearer {admin_token}'
-    #     }
-
-    #     # Get the user and return
-    #     query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups'
-    #     response = 
requests.get(query_user_url, headers=headers, timeout=timeout) - # response.raise_for_status() - # return response.json() - - # @staticmethod - # def get_users_groups(user_ids: List): - # """Get user groups from Keycloak by user ids.For bulk purposes.""" - # # TODO if List is bigger than a number ; if so reject. - # keycloak = current_app.config['KEYCLOAK_CONFIG'] - # base_url = keycloak['BASE_URL'] - # # TODO fix this during tests and remove below - # if not base_url: - # return {} - # keycloak = current_app.config['KEYCLOAK_CONFIG'] - # realm = keycloak['REALMNAME'] - # timeout = keycloak['CONNECT_TIMEOUT'] - # admin_token = KeycloakService._get_admin_token() - # headers = { - # 'Content-Type': ContentType.JSON.value, - # 'Authorization': f'Bearer {admin_token}' - # } - # user_group_mapping = {} - # # Get the user and return - # for user_id in user_ids: - # query_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups' - # response = requests.get(query_user_url, headers=headers, timeout=timeout) + # pylint: disable=too-many-instance-attributes + # Eight is reasonable in this case. + def __init__(self): + """Initialize Keycloak configuration.""" + keycloak = Config().KEYCLOAK_CONFIG + self.base_url = keycloak['CSS_API_URL'] + self.realm = keycloak['REALMNAME'] + self.integration_id = keycloak['CSS_API_INTEGRATION_ID'] + self.environment = keycloak['CSS_API_ENVIRONMENT'] + self.timeout = keycloak['CONNECT_TIMEOUT'] + self.admin_base_url = keycloak['ADMIN_BASE_URL'] + self.admin_client_id = keycloak['ADMIN_USERNAME'] + self.admin_secret = keycloak['ADMIN_SECRET'] + + def get_user_roles(self, user_id): + """Get user composite roles from Keycloak by userid.""" + admin_token = self._get_admin_token() + headers = { + 'Content-Type': ContentType.JSON.value, + 'Authorization': f'Bearer {admin_token}' + } - # if response.status_code == 200: - # if (groups := response.json()) is not None: - # user_group_mapping[user_id] = [group.get('name') for group in groups] - # else: - # user_group_mapping[user_id] = [] + # Get the user and return + query_user_url = (f'{self.base_url}/{self.integration_id}/' + f'{self.environment}/users/{user_id}/roles') + response = requests.get(query_user_url, headers=headers, timeout=self.timeout) + response.raise_for_status() + return response.json() - # return user_group_mapping + def get_users_roles(self, user_ids: List): + """Get user composite roles from Keycloak by user ids.""" + # TODO if List is bigger than a number ; if so reject. 
+ admin_token = self._get_admin_token() + headers = { + 'Content-Type': ContentType.JSON.value, + 'Authorization': f'Bearer {admin_token}' + } + user_role_mapping = {} + # Get the user and return + for user_id in user_ids: + query_user_url = (f'{self.base_url}/{self.integration_id}/' + f'{self.environment}/users/{user_id}/roles') + response = requests.get(query_user_url, headers=headers, timeout=self.timeout) + if response.status_code == 200: + if (roles := response.json().get('data')) is not None: + user_role_mapping[user_id] = [role.get('name') for role in roles] + else: + user_role_mapping[user_id] = [] + + return user_role_mapping # @staticmethod # def _get_group_id(admin_token: str, group_name: str): @@ -99,25 +101,18 @@ class KeycloakService: # pylint: disable=too-few-public-methods # return group_id # return None - @staticmethod - def _get_admin_token(): + def _get_admin_token(self): """Create an admin token.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - admin_client_id = keycloak['ADMIN_USERNAME'] - admin_secret = keycloak['ADMIN_SECRET'] - timeout = keycloak['CONNECT_TIMEOUT'] - headers = { 'Content-Type': 'application/x-www-form-urlencoded' } - token_issuer = current_app.config['JWT_CONFIG']['ISSUER'] - token_url = f'{token_issuer}/protocol/openid-connect/token' + token_url = f'{self.admin_base_url}/realms/{self.realm}/protocol/openid-connect/token' response = requests.post( token_url, headers=headers, - timeout=timeout, - data=f'client_id={admin_client_id}&grant_type=client_credentials' - f'&client_secret={admin_secret}' + timeout=self.timeout, + data=f'client_id={self.admin_client_id}&grant_type=client_credentials' + f'&client_secret={self.admin_secret}' ) return response.json().get('access_token') @@ -129,7 +124,7 @@ def _get_admin_token(): # realm = keycloak['REALMNAME'] # timeout = keycloak['CONNECT_TIMEOUT'] # # Create an admin token - # admin_token = KeycloakService._get_admin_token() + # admin_token = self._get_admin_token() # # Get the '$group_name' group # group_id = KeycloakService._get_group_id(admin_token, group_name) @@ -143,128 +138,86 @@ def _get_admin_token(): # timeout=timeout) # response.raise_for_status() - # @staticmethod - # def add_user_to_group(user_id: str, group_name: str): - # """Add user to the keycloak group.""" - # keycloak = current_app.config['KEYCLOAK_CONFIG'] - # base_url = keycloak['BASE_URL'] - # realm = keycloak['REALMNAME'] - # timeout = keycloak['CONNECT_TIMEOUT'] - # # Create an admin token - # admin_token = KeycloakService._get_admin_token() - # # Get the '$group_name' group - # group_id = KeycloakService._get_group_id(admin_token, group_name) - - # # Add user to the keycloak group '$group_name' - # headers = { - # 'Content-Type': ContentType.JSON.value, - # 'Authorization': f'Bearer {admin_token}' - # } - # add_to_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - # response = requests.put(add_to_group_url, headers=headers, - # timeout=timeout) - # response.raise_for_status() - - @staticmethod - def add_attribute_to_user(user_id: str, attribute_value: str, attribute_id: str = 'tenant_id'): - """Add attribute to a keyclaok user.Default is set as tenant Id.""" - config = current_app.config['KEYCLOAK_CONFIG'] - base_url = config.get('BASE_URL') - realm = config.get('REALMNAME') - admin_token = KeycloakService._get_admin_token() - - tenant_attributes = { - attribute_id: attribute_value + def assign_composite_role_to_user(self, user_id: str, composite_role: str): + """Add user to the keycloak composite 
roles.""" + admin_token = self._get_admin_token() + # Add user to the keycloak composite roles '$composite_role' + headers = { + 'Content-Type': ContentType.JSON.value, + 'Authorization': f'Bearer {admin_token}' } + add_to_role_url = f'{self.base_url}/{self.integration_id}/{self.environment}/users/{user_id}/roles' - user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}' - headers = {'Authorization': f'Bearer {admin_token}'} - response = requests.get(user_url, headers=headers) - user_data = response.json() - user_data.setdefault('attributes', {}).update(tenant_attributes) - requests.put(user_url, json=user_data, headers=headers) + # Creating data payload + data = [{'name': composite_role}] + response = requests.post(add_to_role_url, headers=headers, json=data, + timeout=self.timeout) response.raise_for_status() - # @staticmethod - # def remove_user_from_group(user_id: str, group_name: str): - # """Remove user from the keycloak group.""" - # keycloak = current_app.config['KEYCLOAK_CONFIG'] - # base_url = keycloak['BASE_URL'] - # realm = keycloak['REALMNAME'] - # timeout = keycloak['CONNECT_TIMEOUT'] - # # Create an admin token - # admin_token = KeycloakService._get_admin_token() - # # Get the '$group_name' group - # group_id = KeycloakService._get_group_id(admin_token, group_name) + def remove_composite_role_from_user(self, user_id: str, role: str): + """Remove user from the keycloak composite role.""" + # Create an admin token + admin_token = self._get_admin_token() - # # Remove user from the keycloak group '$group_name' - # headers = { - # 'Content-Type': ContentType.JSON.value, - # 'Authorization': f'Bearer {admin_token}' - # } - # remove_from_group_url = f'{base_url}/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - # response = requests.delete(remove_from_group_url, headers=headers, timeout=timeout) - # response.raise_for_status() + # Remove user from the keycloak composite role '$role' + headers = { + 'Content-Type': ContentType.JSON.value, + 'Authorization': f'Bearer {admin_token}' + } + remove_from_role_url = f'{self.base_url}/{self.integration_id}/{self.environment}/users/{user_id}/roles/{role}' + response = requests.delete(remove_from_role_url, headers=headers, timeout=self.timeout) + response.raise_for_status() - @staticmethod - def add_user(user: dict): + def add_user(self, user: dict): """Add user to Keycloak.Mainly used for Tests;Dont use it for actual user creation in application.""" # Add user and set password - admin_token = KeycloakService._get_admin_token() - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] + admin_token = self._get_admin_token() headers = { 'Content-Type': ContentType.JSON.value, 'Authorization': f'Bearer {admin_token}' } - add_user_url = f'{base_url}/admin/realms/{realm}/users' + add_user_url = f'{self.base_url}/admin/realms/{self.realm}/users' response = requests.post(add_user_url, data=json.dumps(user), headers=headers, - timeout=timeout) + timeout=self.timeout) response.raise_for_status() - return KeycloakService.get_user_by_username(user.get('username'), admin_token) + return self.get_user_by_username(user.get('username'), admin_token) - @staticmethod - def get_user_by_username(username, admin_token=None): + def get_user_by_username(self, username, admin_token=None): """Get user from Keycloak by username.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - 
timeout = keycloak['CONNECT_TIMEOUT'] if not admin_token: - admin_token = KeycloakService._get_admin_token() + admin_token = self._get_admin_token() headers = { 'Content-Type': ContentType.JSON.value, 'Authorization': f'Bearer {admin_token}' } # Get the user and return - query_user_url = f'{base_url}/admin/realms/{realm}/users?username={username}' - response = requests.get(query_user_url, headers=headers, timeout=timeout) + query_user_url = f'{self.base_url}/admin/realms/{self.realm}/users?username={username}' + response = requests.get(query_user_url, headers=headers, timeout=self.timeout) return response.json()[0] - @staticmethod - def toggle_user_enabled_status(user_id, enabled): + def toggle_user_enabled_status(self, user_id, enabled): """Toggle the enabled status of a user in Keycloak.""" - keycloak = current_app.config['KEYCLOAK_CONFIG'] - base_url = keycloak['BASE_URL'] - realm = keycloak['REALMNAME'] - timeout = keycloak['CONNECT_TIMEOUT'] - admin_token = KeycloakService._get_admin_token() + admin_token = self._get_admin_token() headers = { 'Content-Type': ContentType.JSON.value, 'Authorization': f'Bearer {admin_token}' } - user_data = { - 'enabled': enabled # Set the user's enabled status based on 'enable' parameter - } + query_user_url = f'{self.base_url}/{self.integration_id}/{self.environment}/users/{user_id}/roles' + response = requests.get(query_user_url, headers=headers, timeout=self.timeout) + + if response.status_code == 200 and enabled: + role_name = KeycloakCompositeRoleNames.IT_VIEWER.value + self.assign_composite_role_to_user(user_id=user_id, composite_role=role_name) + + if response.status_code == 200 and not enabled: + roles_data = response.json().get('data', []) + for role in roles_data: + role_name = role.get('name') + self.remove_composite_role_from_user(user_id=user_id, role=role_name) - # Update the user's enabled status - update_user_url = f'{base_url}/admin/realms/{realm}/users/{user_id}' - response = requests.put(update_user_url, json=user_data, headers=headers, timeout=timeout) response.raise_for_status() diff --git a/met-api/src/met_api/services/membership_service.py b/met-api/src/met_api/services/membership_service.py index 5eb6401b6..9615f9b57 100644 --- a/met-api/src/met_api/services/membership_service.py +++ b/met-api/src/met_api/services/membership_service.py @@ -4,10 +4,14 @@ from met_api.constants.membership_type import MembershipType from met_api.exceptions.business_exception import BusinessException +from met_api.models import StaffUser as StaffUserModel from met_api.models.engagement import Engagement as EngagementModel from met_api.models.membership import Membership as MembershipModel +from met_api.schemas.staff_user import StaffUserSchema from met_api.services import authorization -from met_api.utils.enums import MembershipStatus +from met_api.services.staff_user_service import KEYCLOAK_SERVICE, StaffUserService +from met_api.utils.constants import CompositeRoles +from met_api.utils.enums import KeycloakCompositeRoleNames, MembershipStatus from met_api.utils.roles import Role from met_api.utils.token_info import TokenInfo @@ -15,30 +19,30 @@ class MembershipService: """Membership management service.""" - # TODO: Create membership method that uses composite roles - # @staticmethod - # def create_membership(engagement_id, request_json: dict): - # """Create membership.""" - # user_id = request_json.get('user_id') - # user: StaffUserModel = StaffUserModel.get_user_by_external_id(user_id) - # if not user: - # raise BusinessException( - # 
error='Invalid User.', - # status_code=HTTPStatus.BAD_REQUEST) - - # one_of_roles = ( - # MembershipType.TEAM_MEMBER.name, - # Role.EDIT_MEMBERS.value - # ) - # authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) - - # user_details = StaffUserSchema().dump(user) - - # MembershipService._validate_create_membership(engagement_id, user_details) - # group_name, membership_type = MembershipService._get_membership_details(user_details) - # MembershipService._add_user_group(user_details, group_name) - # membership = MembershipService._create_membership_model(engagement_id, user.id, membership_type) - # return membership + @staticmethod + def create_membership(engagement_id, request_json: dict): + """Create membership.""" + user_id = request_json.get('user_id') + user: StaffUserModel = StaffUserModel.get_user_by_external_id(user_id) + if not user: + raise BusinessException( + error='Invalid User.', + status_code=HTTPStatus.BAD_REQUEST) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_MEMBERS.value + ) + authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) + + user_details = StaffUserSchema().dump(user) + # attach and map roles + StaffUserService.attach_roles([user_details]) + MembershipService._validate_create_membership(engagement_id, user_details) + composite_roles, membership_type = MembershipService._get_membership_details(user_details) + MembershipService._assign_composite_role_to_user(user_details, composite_roles) + membership = MembershipService._create_membership_model(engagement_id, user.id, membership_type) + return membership @staticmethod def _validate_create_membership(engagement_id, user_details): @@ -51,12 +55,11 @@ def _validate_create_membership(engagement_id, user_details): user_id = user_details.get('id') - # TODO: Check for permission level once composite role permission levels are added. 
- # roles = user_details.get('roles') - # if KeycloakPermissionLevels.IT_ADMIN.value in roles: - # raise BusinessException( - # error='This user is already a Administrator.', - # status_code=HTTPStatus.CONFLICT.value) + roles = user_details.get('composite_roles') + if KeycloakCompositeRoleNames.IT_ADMIN.value in roles: + raise BusinessException( + error='This user is already a Administrator.', + status_code=HTTPStatus.CONFLICT.value) existing_membership = MembershipModel.find_by_engagement_and_user_id( engagement_id, @@ -66,7 +69,7 @@ def _validate_create_membership(engagement_id, user_details): if existing_membership: raise BusinessException( - error=f'This {user_details.get("main_group", "user")} is already assigned to this engagement.', + error=f'This {user_details.get("main_role", "user")} is already assigned to this engagement.', status_code=HTTPStatus.CONFLICT.value) request_user = TokenInfo.get_user_data() @@ -75,45 +78,43 @@ def _validate_create_membership(engagement_id, user_details): error='You cannot add yourself to an engagement.', status_code=HTTPStatus.FORBIDDEN.value) - # TODO: Replace this method with one that checks membership type with composite roles - # @staticmethod - # def _get_membership_details(user_details): - # """Get the group name and membership type for the user based on their assigned groups.""" - # default_group_name = Groups.TEAM_MEMBER.name - # default_membership_type = MembershipType.TEAM_MEMBER - - # is_reviewer = Groups.REVIEWER.value in user_details.get('groups') - # is_team_member = Groups.TEAM_MEMBER.value in user_details.get('groups') - - # if is_reviewer: - # # If the user is assigned to the REVIEWER group, set the group name and membership type accordingly - # group_name = Groups.REVIEWER.name - # membership_type = MembershipType.REVIEWER - # elif is_team_member: - # # If the user is assigned to the TEAM_MEMBER group, set the group name and membership type accordingly - # group_name = Groups.TEAM_MEMBER.name - # membership_type = MembershipType.TEAM_MEMBER - # else: - # # If the user is not assigned to either group, return default values for group name and membership type - # group_name = default_group_name - # membership_type = default_membership_type - - # return group_name, membership_type - - # TODO: Replace this method with a method to add composite roles - # @staticmethod - # def _add_user_group(user: StaffUserModel, group_name=Groups.TEAM_MEMBER.name): - # valid_member_teams = [Groups.TEAM_MEMBER.name, Groups.REVIEWER.name] - # if group_name not in valid_member_teams: - # raise BusinessException( - # error='Invalid Group name.', - # status_code=HTTPStatus.BAD_REQUEST - # ) - - # KEYCLOAK_SERVICE.add_user_to_group( - # user_id=user.get('external_id'), - # group_name=group_name - # ) + @staticmethod + def _get_membership_details(user_details): + """Get the composite role and membership type for the user based on their assigned composite roles.""" + default_role = CompositeRoles.TEAM_MEMBER.name + default_membership_type = MembershipType.TEAM_MEMBER + + is_reviewer = CompositeRoles.REVIEWER.value in user_details.get('composite_roles') + is_team_member = CompositeRoles.TEAM_MEMBER.value in user_details.get('composite_roles') + + if is_reviewer: + # If the user is assigned to the REVIEWER role, set the role name and membership type accordingly + composite_roles = CompositeRoles.REVIEWER.name + membership_type = MembershipType.REVIEWER + elif is_team_member: + # If the user is assigned to the TEAM_MEMBER role, set the role name and membership type 
accordingly + composite_roles = CompositeRoles.TEAM_MEMBER.name + membership_type = MembershipType.TEAM_MEMBER + else: + # If the user is not assigned to either role, return default values for role name and membership type + composite_roles = default_role + membership_type = default_membership_type + + return composite_roles, membership_type + + @staticmethod + def _assign_composite_role_to_user(user: StaffUserModel, composite_role=CompositeRoles.TEAM_MEMBER.name): + valid_member_teams = [CompositeRoles.TEAM_MEMBER.name, CompositeRoles.REVIEWER.name] + if composite_role not in valid_member_teams: + raise BusinessException( + error='Invalid composite role name.', + status_code=HTTPStatus.BAD_REQUEST + ) + + KEYCLOAK_SERVICE.assign_composite_role_to_user( + user_id=user.get('external_id'), + composite_role=composite_role + ) @staticmethod def _create_membership_model(engagement_id, user_id, membership_type=MembershipType.TEAM_MEMBER): diff --git a/met-api/src/met_api/services/staff_user_membership_service.py b/met-api/src/met_api/services/staff_user_membership_service.py index e698e58a7..f28cd946b 100644 --- a/met-api/src/met_api/services/staff_user_membership_service.py +++ b/met-api/src/met_api/services/staff_user_membership_service.py @@ -6,17 +6,17 @@ from met_api.schemas.staff_user import StaffUserSchema from met_api.services.membership_service import MembershipService from met_api.services.staff_user_service import KEYCLOAK_SERVICE, StaffUserService -from met_api.utils.user_context import UserContext, user_context +from met_api.utils.constants import CompositeRoles from met_api.utils.enums import UserStatus +from met_api.utils.user_context import UserContext, user_context class StaffUserMembershipService: """Staff User Membership management service.""" - # TODO: Restore a way to add users to composite roles. @classmethod @user_context - def reassign_user(cls, user_id, **kwargs): + def reassign_user(cls, user_id, role, **kwargs): """Add user to a new composite role and reassign memberships.""" user = StaffUserService.get_user_by_id(user_id, include_roles=True) if not user: @@ -25,20 +25,31 @@ def reassign_user(cls, user_id, **kwargs): status_code=HTTPStatus.BAD_REQUEST) external_id = user.get('external_id', None) + main_role = user.get('main_role', None) - # TODO: Put check for composite role membership into this conditional. 
- if not external_id: + if any([not external_id, not main_role]): raise BusinessException( error='Invalid User.', status_code=HTTPStatus.BAD_REQUEST) - user_from_context: UserContext = kwargs['user_context'] + if role not in CompositeRoles.__members__: + raise BusinessException( + error='Invalid Role.', + status_code=HTTPStatus.BAD_REQUEST) + if main_role == role: + raise BusinessException( + error='User is already assigned this role.', + status_code=HTTPStatus.BAD_REQUEST) + + user_from_context: UserContext = kwargs['user_context'] if external_id == user_from_context.sub: raise BusinessException( error='User cannot change their own permission level.', status_code=HTTPStatus.CONFLICT.value) + StaffUserService.remove_composite_role_from_user(external_id, CompositeRoles.get_name_by_value(main_role)) + StaffUserService.assign_composite_role_to_user(external_id, role) MembershipService.revoke_memberships_bulk(user_id) new_user = StaffUserService.get_user_by_id(user_id, include_roles=True) return StaffUserSchema().dump(new_user) diff --git a/met-api/src/met_api/services/staff_user_service.py b/met-api/src/met_api/services/staff_user_service.py index 4092a6f9b..9123b11d6 100644 --- a/met-api/src/met_api/services/staff_user_service.py +++ b/met-api/src/met_api/services/staff_user_service.py @@ -1,7 +1,7 @@ """Service for user management.""" from http import HTTPStatus -from flask import current_app, g +from flask import current_app from met_api.exceptions.business_exception import BusinessException from met_api.models.pagination_options import PaginationOptions @@ -9,6 +9,8 @@ from met_api.schemas.staff_user import StaffUserSchema from met_api.services.keycloak import KeycloakService from met_api.utils import notification +from met_api.utils.constants import COMPOSITE_ROLE_MAPPING, CompositeRoles +from met_api.utils.enums import KeycloakCompositeRoleNames from met_api.utils.template import Template KEYCLOAK_SERVICE = KeycloakService() @@ -24,9 +26,7 @@ def get_user_by_id(cls, _user_id, include_roles=False, include_inactive=False): db_user = StaffUserModel.get_by_id(_user_id, include_inactive) user = user_schema.dump(db_user) if include_roles: - # TODO: Replace this method with one that uses composite roles - # cls.attach_roles([user]) - pass + cls.attach_roles([user]) return user @classmethod @@ -100,29 +100,24 @@ def _render_email_template(user: StaffUserModel): ) return subject, body, args - # TODO: Replace this method with one that uses composite roles, if necessary - # @staticmethod - # def attach_roles(user_collection): - # """Attach keycloak groups to user object.""" - # group_user_details = KEYCLOAK_SERVICE.get_users_groups( - # [user.get('external_id') for user in user_collection]) - - # for user in user_collection: - # # Transform group name from ADMINISTRATOR to Administrator - # # TODO etc;Arrive at a better implementation than keeping a static list - # # TODO Probably add a custom attribute in the keycloak as title against a group? 
- # groups = group_user_details.get(user.get('external_id')) - # user['groups'] = '' - # if groups: - # user['groups'] = [GROUP_NAME_MAPPING.get(group, '') for group in groups] - # if Groups.IT_ADMIN.value in user['groups']: - # user['main_group'] = Groups.IT_ADMIN.value - # elif Groups.TEAM_MEMBER.value in user['groups']: - # user['main_group'] = Groups.TEAM_MEMBER.value - # elif Groups.REVIEWER.value in user['groups']: - # user['main_group'] = Groups.REVIEWER.value - # else: - # user['main_group'] = user['groups'][0] + @staticmethod + def attach_roles(user_collection): + """Attach keycloak composite roles to user object.""" + user_roles = KEYCLOAK_SERVICE.get_users_roles( + [user.get('external_id') for user in user_collection]) + for user in user_collection: + composite_roles = user_roles.get(user.get('external_id')) + user['composite_roles'] = '' + if composite_roles: + user['composite_roles'] = [COMPOSITE_ROLE_MAPPING.get(role, '') for role in composite_roles] + if CompositeRoles.IT_ADMIN.value in user['composite_roles']: + user['main_role'] = CompositeRoles.IT_ADMIN.value + elif CompositeRoles.TEAM_MEMBER.value in user['composite_roles']: + user['main_role'] = CompositeRoles.TEAM_MEMBER.value + elif CompositeRoles.REVIEWER.value in user['composite_roles']: + user['main_role'] = CompositeRoles.REVIEWER.value + else: + user['main_role'] = user['composite_roles'][0] @classmethod def find_users( @@ -137,9 +132,7 @@ def find_users( user_collection = StaffUserSchema(many=True).dump(users) if include_roles: - # TODO: Replace this method with one that uses composite roles - # cls.attach_roles(user_collection) - pass + cls.attach_roles(user_collection) return { 'items': user_collection, @@ -160,30 +153,25 @@ def validate_fields(data: StaffUserSchema): raise ValueError('Some required fields are empty') @classmethod - def add_user_to_group(cls, external_id: str, group_name: str): + def assign_composite_role_to_user(cls, external_id: str, composite_role: str): """Create or update a user.""" db_user = StaffUserModel.get_user_by_external_id(external_id) cls.validate_user(db_user) - # TODO: Replace this method with one that uses composite roles - print(group_name) - # KEYCLOAK_SERVICE.add_user_to_group(user_id=external_id, group_name=group_name) - KEYCLOAK_SERVICE.add_attribute_to_user(user_id=external_id, attribute_value=g.tenant_id) + KEYCLOAK_SERVICE.assign_composite_role_to_user(user_id=external_id, composite_role=composite_role) return StaffUserSchema().dump(db_user) @classmethod - def remove_user_from_group(cls, external_id: str, group_name: str): + def remove_composite_role_from_user(cls, external_id: str, role: str): """Create or update a user.""" db_user = StaffUserModel.get_user_by_external_id(external_id) if db_user is None: raise KeyError('User not found') - # TODO: Replace this method with one that uses composite roles - print(group_name) - # KEYCLOAK_SERVICE.remove_user_from_group(user_id=external_id, group_name=group_name) + KEYCLOAK_SERVICE.remove_composite_role_from_user(user_id=external_id, role=role) return StaffUserSchema().dump(db_user) @@ -193,10 +181,11 @@ def validate_user(db_user: StaffUserModel): if db_user is None: raise KeyError('User not found') - # TODO: Restore permission level functionality to replace "groups" later - # groups = KEYCLOAK_SERVICE.get_user_groups(user_id=db_user.external_id) - # group_names = [group.get('name') for group in groups] - # if KeycloakGroupName.IT_ADMIN.value in group_names: - # raise BusinessException( - # error='This user is already an 
Administrator.', - # status_code=HTTPStatus.CONFLICT.value) + composite_roles = KEYCLOAK_SERVICE.get_user_roles(user_id=db_user.external_id) + + if 'data' in composite_roles and len(composite_roles['data']) > 0: + role_names = [role.get('name') for role in composite_roles] + if KeycloakCompositeRoleNames.IT_ADMIN.value in role_names: + raise BusinessException( + error='This user is already an Administrator.', + status_code=HTTPStatus.CONFLICT.value) diff --git a/met-api/src/met_api/utils/constants.py b/met-api/src/met_api/utils/constants.py index 5a7202c87..52a34b90e 100644 --- a/met-api/src/met_api/utils/constants.py +++ b/met-api/src/met_api/utils/constants.py @@ -13,28 +13,30 @@ # limitations under the License. """Constants definitions.""" -# from enum import Enum +from enum import Enum -# TODO Remove this -# class Groups(Enum): -# """Enumeration representing user groups.""" -# IT_ADMIN = 'Administrator' -# TEAM_MEMBER = 'Team Member' -# REVIEWER = 'Reviewer' -# IT_VIEWER = 'Viewer' +class CompositeRoles(Enum): + """Enumeration representing user roles.""" -# @staticmethod -# def get_name_by_value(value): -# """Get the name of a group by its value.""" -# for group in Groups: -# if group.value == value: -# return group.name -# raise ValueError('No matching key found for the given value.') + IT_ADMIN = 'Administrator' + TEAM_MEMBER = 'Team Member' + REVIEWER = 'Reviewer' + IT_VIEWER = 'Viewer' + + @staticmethod + def get_name_by_value(value): + """Get the name of a role by its value.""" + for role in CompositeRoles: + if role.value == value: + return role.name + raise ValueError('No matching key found for the given value.') TENANT_ID_HEADER = 'tenant-id' +COMPOSITE_ROLE_MAPPING = {role.name: role.value for role in CompositeRoles} + TENANT_ID_JWT_CLAIM = 'tenant_id' diff --git a/met-api/tests/unit/services/test_keycloak.py b/met-api/tests/unit/services/test_keycloak.py index 87cdfb842..d9175b6f3 100644 --- a/met-api/tests/unit/services/test_keycloak.py +++ b/met-api/tests/unit/services/test_keycloak.py @@ -16,25 +16,25 @@ Test-Suite to ensure that the Keycloak Service is working as expected. """ -from met_api.services.keycloak import KeycloakService -from tests.utilities.factory_scenarios import KeycloakScenario - -KEYCLOAK_SERVICE = KeycloakService() - - -def test_keycloak_add_user(session): - """Add user to Keycloak. Assert return a user with the same username as the username in request.""" - request = KeycloakScenario.create_user_request() - user = KEYCLOAK_SERVICE.add_user(request) - assert user.get('username') == request.get('username') +# from met_api.services.keycloak import KeycloakService +# from tests.utilities.factory_scenarios import KeycloakScenario +# +# KEYCLOAK_SERVICE = KeycloakService() +# TODO: Replace this test with one that gets user composite roles +# def test_keycloak_add_user(session): +# """Add user to Keycloak. Assert return a user with the same username as the username in request.""" +# request = KeycloakScenario.create_user_request() +# user = KEYCLOAK_SERVICE.add_user(request) +# assert user.get('username') == request.get('username') -def test_keycloak_get_user_by_username(session): - """Get user by username. 
Assert get a user with the same username as the username in request.""" - request = KeycloakScenario.create_user_request() - KEYCLOAK_SERVICE.add_user(request) - user = KEYCLOAK_SERVICE.get_user_by_username(request.get('username')) - assert user.get('username') == request.get('username') +# TODO: Replace this test with one that gets user composite roles +# def test_keycloak_get_user_by_username(session): +# """Get user by username. Assert get a user with the same username as the username in request.""" +# request = KeycloakScenario.create_user_request() +# KEYCLOAK_SERVICE.add_user(request) +# user = KEYCLOAK_SERVICE.get_user_by_username(request.get('username')) +# assert user.get('username') == request.get('username') # TODO: Replace this test with one that gets user composite roles # def test_keycloak_get_user_groups(session): diff --git a/met-web/src/apiManager/endpoints/index.ts b/met-web/src/apiManager/endpoints/index.ts index 95666efa6..80e30cfe6 100644 --- a/met-web/src/apiManager/endpoints/index.ts +++ b/met-web/src/apiManager/endpoints/index.ts @@ -36,8 +36,8 @@ const Endpoints = { GET: `${AppConfig.apiUrl}/user/user_id`, CREATE_UPDATE: `${AppConfig.apiUrl}/user/`, GET_LIST: `${AppConfig.apiUrl}/user/`, - ADD_TO_GROUP: `${AppConfig.apiUrl}/user/user_id/groups`, - CHANGE_GROUP: `${AppConfig.apiUrl}/user/user_id/groups`, + ADD_TO_COMPOSITE_ROLE: `${AppConfig.apiUrl}/user/user_id/roles`, + CHANGE_COMPOSITE_ROLE: `${AppConfig.apiUrl}/user/user_id/roles`, GET_USER_ENGAGEMENTS: `${AppConfig.apiUrl}/user/user_id/engagements`, TOGGLE_USER_STATUS: `${AppConfig.apiUrl}/user/user_id/status`, }, diff --git a/met-web/src/components/comments/admin/reviewListing/Submissions.tsx b/met-web/src/components/comments/admin/reviewListing/Submissions.tsx index 82ecfd386..913a19b4c 100644 --- a/met-web/src/components/comments/admin/reviewListing/Submissions.tsx +++ b/met-web/src/components/comments/admin/reviewListing/Submissions.tsx @@ -16,7 +16,7 @@ import { CommentListingContext } from './CommentListingContext'; import ExpandMoreIcon from '@mui/icons-material/ExpandMore'; import { useAppSelector } from 'hooks'; import { USER_ROLES } from 'services/userService/constants'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; const Submissions = () => { const { @@ -54,7 +54,7 @@ const Submissions = () => { if ( roles.includes(USER_ROLES.REVIEW_COMMENTS) || (assignedEngagements.includes(Number(survey.engagement_id)) && - userDetail.groups?.includes('/' + USER_GROUP.TEAM_MEMBER.value)) + userDetail.composite_roles?.includes('/' + USER_COMPOSITE_ROLE.TEAM_MEMBER.value)) ) { return ( diff --git a/met-web/src/components/comments/admin/textListing/index.tsx b/met-web/src/components/comments/admin/textListing/index.tsx index 5f071ffd5..dfc69060d 100644 --- a/met-web/src/components/comments/admin/textListing/index.tsx +++ b/met-web/src/components/comments/admin/textListing/index.tsx @@ -21,7 +21,7 @@ import { getSubmissionPage } from 'services/submissionService'; import { SurveySubmission } from 'models/surveySubmission'; import { formatDate, formatToUTC } from 'components/common/dateHelper'; import { USER_ROLES } from 'services/userService/constants'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; import { updateURLWithPagination } from 'components/common/Table/utils'; import CommentIcon from '@mui/icons-material/Comment'; import CommentsDisabledIcon from '@mui/icons-material/CommentsDisabled'; @@ -184,7 +184,7 @@ const 
CommentTextListing = () => { if ( roles.includes(USER_ROLES.REVIEW_COMMENTS) || (assignedEngagements.includes(Number(row.engagement_id)) && - userDetail.groups?.includes('/' + USER_GROUP.TEAM_MEMBER.value)) + userDetail.composite_roles?.includes('/' + USER_COMPOSITE_ROLE.TEAM_MEMBER.value)) ) { return ( diff --git a/met-web/src/components/userManagement/listing/ActionsDropDown.tsx b/met-web/src/components/userManagement/listing/ActionsDropDown.tsx index 252dde533..d1a525647 100644 --- a/met-web/src/components/userManagement/listing/ActionsDropDown.tsx +++ b/met-web/src/components/userManagement/listing/ActionsDropDown.tsx @@ -1,6 +1,6 @@ import React, { useMemo, useContext } from 'react'; import { MenuItem, Select } from '@mui/material'; -import { User, USER_GROUP, USER_STATUS } from 'models/user'; +import { User, USER_COMPOSITE_ROLE, USER_STATUS } from 'models/user'; import { Palette } from 'styles/Theme'; import { UserManagementContext } from './UserManagementContext'; import { useAppSelector } from 'hooks'; @@ -18,21 +18,21 @@ export const ActionsDropDown = ({ selectedUser }: { selectedUser: User }) => { const { roles, userDetail } = useAppSelector((state) => state.user); const hasNoRole = (): boolean => { - if (selectedUser.main_group) { + if (selectedUser.main_role) { return false; } return true; }; const isAdmin = (): boolean => { - if (selectedUser?.main_group == USER_GROUP.ADMIN.label) { + if (selectedUser?.main_role == USER_COMPOSITE_ROLE.ADMIN.label) { return true; } return false; }; const isViewer = (): boolean => { - if (selectedUser?.main_group == USER_GROUP.VIEWER.label) { + if (selectedUser?.main_role == USER_COMPOSITE_ROLE.VIEWER.label) { return true; } return false; @@ -81,7 +81,7 @@ export const ActionsDropDown = ({ selectedUser }: { selectedUser: User }) => { selectedUser.id != userDetail?.user?.id, }, ], - [selectedUser.id, selectedUser.main_group], + [selectedUser.id, selectedUser.main_role], ); return ( diff --git a/met-web/src/components/userManagement/listing/AddUserModal.tsx b/met-web/src/components/userManagement/listing/AddUserModal.tsx index 76e753af8..8031103b0 100644 --- a/met-web/src/components/userManagement/listing/AddUserModal.tsx +++ b/met-web/src/components/userManagement/listing/AddUserModal.tsx @@ -100,7 +100,7 @@ export const AddUserModal = () => { openNotification({ severity: 'success', text: `You have successfully added ${user?.first_name + ' ' + user?.last_name} as a ${ - user?.main_group + user?.main_role } on ${data.engagement?.name}.`, }), ); diff --git a/met-web/src/components/userManagement/listing/AssignRoleModal.tsx b/met-web/src/components/userManagement/listing/AssignRoleModal.tsx index 6a2ce5f90..0ed70da3b 100644 --- a/met-web/src/components/userManagement/listing/AssignRoleModal.tsx +++ b/met-web/src/components/userManagement/listing/AssignRoleModal.tsx @@ -15,14 +15,14 @@ import { useTheme, } from '@mui/material'; import { MetHeader3, MetLabel, MetSmallText, modalStyle, PrimaryButton, SecondaryButton } from 'components/common'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; import { UserManagementContext } from './UserManagementContext'; import { Palette } from 'styles/Theme'; import { useForm, FormProvider, SubmitHandler, Controller } from 'react-hook-form'; import { yupResolver } from '@hookform/resolvers/yup'; import * as yup from 'yup'; import ControlledRadioGroup from 'components/common/ControlledInputComponents/ControlledRadioGroup'; -import { addUserToGroup } from 
'services/userService/api'; +import { addUserToRole } from 'services/userService/api'; import { addTeamMemberToEngagement } from 'services/membershipService'; import { When } from 'react-if'; import { openNotification } from 'services/notificationService/notificationSlice'; @@ -34,12 +34,12 @@ import { Engagement } from 'models/engagement'; const schema = yup .object({ - group: yup.string().required('A role must be specified'), + role: yup.string().required('A role must be specified'), engagement: yup .object() .nullable() - .when('group', { - is: USER_GROUP.REVIEWER.value, + .when('role', { + is: USER_COMPOSITE_ROLE.REVIEWER.value, then: yup.object().nullable().required('An engagement must be selected'), }), }) @@ -69,7 +69,7 @@ export const AssignRoleModal = () => { watch, } = methods; - const userTypeSelected = watch('group'); + const userTypeSelected = watch('role'); const formValues = watch(); useEffect(() => { @@ -78,7 +78,7 @@ export const AssignRoleModal = () => { } }, [JSON.stringify(formValues)]); - const { group: groupErrors, engagement: engagementErrors } = errors; + const { role: roleErrors, engagement: engagementErrors } = errors; const handleClose = () => { setassignRoleModalOpen(false); @@ -122,24 +122,24 @@ export const AssignRoleModal = () => { ).current; const assignRoleToUser = async (data: AssignRoleForm) => { - if (userTypeSelected === USER_GROUP.ADMIN.value) { - await addUserToGroup({ user_id: user?.external_id, group: data.group }); + if (userTypeSelected === USER_COMPOSITE_ROLE.ADMIN.value) { + await addUserToRole({ user_id: user?.external_id, role: data.role }); dispatch( openNotification({ severity: 'success', - text: `You have successfully added ${user?.first_name} ${user?.last_name} to the group ${USER_GROUP.ADMIN.label}`, + text: `You have successfully added ${user?.first_name} ${user?.last_name} to the role ${USER_COMPOSITE_ROLE.ADMIN.label}`, }), ); - } else if (userTypeSelected === USER_GROUP.VIEWER.value) { - await addUserToGroup({ user_id: user?.external_id, group: data.group }); + } else if (userTypeSelected === USER_COMPOSITE_ROLE.VIEWER.value) { + await addUserToRole({ user_id: user?.external_id, role: data.role }); dispatch( openNotification({ severity: 'success', - text: `You have successfully added ${user?.first_name} ${user?.last_name} to the group ${USER_GROUP.VIEWER.label}`, + text: `You have successfully added ${user?.first_name} ${user?.last_name} to the role ${USER_COMPOSITE_ROLE.VIEWER.label}`, }), ); } else { - await addUserToGroup({ user_id: user?.external_id, group: data.group }); + await addUserToRole({ user_id: user?.external_id, role: data.role }); await addTeamMemberToEngagement({ user_id: user?.external_id, engagement_id: data.engagement?.id, @@ -147,7 +147,7 @@ export const AssignRoleModal = () => { dispatch( openNotification({ severity: 'success', - text: `You have successfully added ${user?.first_name} ${user?.last_name} as a ${data.group} on ${data.engagement?.name}.`, + text: `You have successfully added ${user?.first_name} ${user?.last_name} as a ${data.role} on ${data.engagement?.name}.`, }), ); } @@ -191,44 +191,44 @@ export const AssignRoleModal = () => { rowSpacing={4} > - + What role would you like to assign to this user? 
- + } label={'Viewer'} /> } label={'Reviewer'} /> } label={'Team Member'} /> } label={'Administrator'} /> - - {String(groupErrors?.message)} + + {String(roleErrors?.message)} diff --git a/met-web/src/components/userManagement/listing/ReassignRoleModal.tsx b/met-web/src/components/userManagement/listing/ReassignRoleModal.tsx index da0d49f2f..d92866c73 100644 --- a/met-web/src/components/userManagement/listing/ReassignRoleModal.tsx +++ b/met-web/src/components/userManagement/listing/ReassignRoleModal.tsx @@ -7,16 +7,16 @@ import { useForm, FormProvider } from 'react-hook-form'; import { yupResolver } from '@hookform/resolvers/yup'; import * as yup from 'yup'; import ControlledRadioGroup from 'components/common/ControlledInputComponents/ControlledRadioGroup'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; import { Unless } from 'react-if'; import { Palette } from 'styles/Theme'; -import { changeUserGroup } from 'services/userService/api'; +import { changeUserRole } from 'services/userService/api'; import { useAppDispatch } from 'hooks'; import { openNotification } from 'services/notificationService/notificationSlice'; const schema = yup .object({ - group: yup.string().required('Please select a role to assign to this user').required(), + role: yup.string().required('Please select a role to assign to this user').required(), }) .required(); @@ -41,15 +41,15 @@ export const ReassignRoleModal = () => { const onSubmit = async (data: AssignRoleForm) => { try { - const { group } = await schema.validate(data); + const { role } = await schema.validate(data); setIsSaving(true); - await changeUserGroup({ user_id: user.id, group }); + await changeUserRole({ user_id: user.id, role }); handleClose(); loadUserListing(); dispatch( openNotification({ severity: 'success', - text: `You have reassigned ${user.first_name} ${user.last_name} as ${group}.`, + text: `You have reassigned ${user.first_name} ${user.last_name} as ${role}.`, }), ); setIsSaving(false); @@ -89,31 +89,31 @@ export const ReassignRoleModal = () => { > What role would you like to reassign to this user? 
- - + + } label={'Viewer'} /> - + } label={'Reviewer'} /> - + } label={'Team Member'} /> - + } label={'Administrator'} /> diff --git a/met-web/src/components/userManagement/listing/UserManagementListing.tsx b/met-web/src/components/userManagement/listing/UserManagementListing.tsx index 035d372b0..66b902d25 100644 --- a/met-web/src/components/userManagement/listing/UserManagementListing.tsx +++ b/met-web/src/components/userManagement/listing/UserManagementListing.tsx @@ -37,13 +37,13 @@ const UserManagementListing = () => { ), }, { - key: 'main_group', + key: 'main_role', numeric: false, disablePadding: true, label: 'Role', allowSort: false, renderCell: (row: User) => { - return row.main_group; + return row.main_role; }, }, { diff --git a/met-web/src/components/userManagement/userDetails/AddToEngagement.tsx b/met-web/src/components/userManagement/userDetails/AddToEngagement.tsx index b592c8a06..c1769e9e3 100644 --- a/met-web/src/components/userManagement/userDetails/AddToEngagement.tsx +++ b/met-web/src/components/userManagement/userDetails/AddToEngagement.tsx @@ -15,13 +15,13 @@ import { useTheme, } from '@mui/material'; import { MetHeader3, MetLabel, MetSmallText, modalStyle, PrimaryButton, SecondaryButton } from 'components/common'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; import { UserDetailsContext } from './UserDetailsContext'; import { useForm, FormProvider, SubmitHandler, Controller } from 'react-hook-form'; import { yupResolver } from '@hookform/resolvers/yup'; import * as yup from 'yup'; import { getEngagements } from 'services/engagementService'; -import { addUserToGroup } from 'services/userService/api'; +import { addUserToRole } from 'services/userService/api'; import { addTeamMemberToEngagement } from 'services/membershipService'; import { When } from 'react-if'; import { openNotification } from 'services/notificationService/notificationSlice'; @@ -36,12 +36,12 @@ import { HTTP_STATUS_CODES } from 'constants/httpResponseCodes'; export const AddToEngagementModal = () => { const { savedUser, addUserModalOpen, setAddUserModalOpen, getUserMemberships, getUserDetails } = useContext(UserDetailsContext); - const userHasGroup = savedUser?.groups && savedUser?.groups.length > 0; + const userHasRole = savedUser?.composite_roles && savedUser?.composite_roles.length > 0; const schema = yup .object({ engagement: yup.object().nullable(), - group: yup.string().when([], { - is: () => savedUser?.groups.length === 0, + role: yup.string().when([], { + is: () => savedUser?.composite_roles.length === 0, then: yup.string().required('A role must be specified'), otherwise: yup.string(), }), @@ -70,7 +70,7 @@ export const AddToEngagementModal = () => { watch, } = methods; - const userTypeSelected = watch('group'); + const userTypeSelected = watch('role'); const formValues = watch(); useEffect(() => { @@ -79,7 +79,7 @@ export const AddToEngagementModal = () => { } }, [JSON.stringify(formValues)]); - const { group: groupErrors, engagement: engagementErrors } = errors; + const { role: roleErrors, engagement: engagementErrors } = errors; const handleClose = () => { setAddUserModalOpen(false); @@ -119,7 +119,7 @@ export const AddToEngagementModal = () => { ).current; const addUserToEngagement = async (data: AddUserForm) => { - if (userHasGroup) { + if (userHasRole) { await addTeamMemberToEngagement({ user_id: savedUser?.external_id, engagement_id: data.engagement?.id, @@ -127,20 +127,20 @@ export const AddToEngagementModal = () => { dispatch( 
openNotification({ severity: 'success', - text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} as a ${savedUser?.main_group} on ${data.engagement?.name}.`, + text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} as a ${savedUser?.main_role} on ${data.engagement?.name}.`, }), ); } else { - if (userTypeSelected === USER_GROUP.ADMIN.value) { - await addUserToGroup({ user_id: savedUser?.external_id, group: data.group ?? '' }); + if (userTypeSelected === USER_COMPOSITE_ROLE.ADMIN.value) { + await addUserToRole({ user_id: savedUser?.external_id, role: data.role ?? '' }); dispatch( openNotification({ severity: 'success', - text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} to the group ${USER_GROUP.ADMIN.label}`, + text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} to the role ${USER_COMPOSITE_ROLE.ADMIN.label}`, }), ); } else { - await addUserToGroup({ user_id: savedUser?.external_id, group: data.group ?? '' }); + await addUserToRole({ user_id: savedUser?.external_id, role: data.role ?? '' }); await addTeamMemberToEngagement({ user_id: savedUser?.external_id, engagement_id: data.engagement?.id, @@ -148,7 +148,7 @@ export const AddToEngagementModal = () => { dispatch( openNotification({ severity: 'success', - text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} as a ${data.group} on ${data.engagement?.name}.`, + text: `You have successfully added ${savedUser?.first_name} ${savedUser?.last_name} as a ${data.role} on ${data.engagement?.name}.`, }), ); } @@ -186,12 +186,12 @@ export const AddToEngagementModal = () => {
- + Assign Role to {savedUser?.first_name + ' ' + savedUser?.last_name} - + Add {savedUser?.first_name + ' ' + savedUser?.last_name} to Engagement @@ -207,9 +207,9 @@ export const AddToEngagementModal = () => { justifyContent="flex-start" rowSpacing={4} > - + - + { > What role would you like to assign to this user? - + } label={'Reviewer'} /> } label={'Team Member'} /> - - {String(groupErrors?.message)} + + {String(roleErrors?.message)} diff --git a/met-web/src/components/userManagement/userDetails/UserDetails.tsx b/met-web/src/components/userManagement/userDetails/UserDetails.tsx index 0fdbf884f..4cf553856 100644 --- a/met-web/src/components/userManagement/userDetails/UserDetails.tsx +++ b/met-web/src/components/userManagement/userDetails/UserDetails.tsx @@ -7,7 +7,7 @@ import { formatDate } from 'components/common/dateHelper'; import AssignedEngagementsListing from './AssignedEngagementsListing'; import UserStatusButton from './UserStatusButton'; import UserDetailsSkeleton from './UserDetailsSkeleton'; -import { USER_GROUP, USER_STATUS } from 'models/user'; +import { USER_COMPOSITE_ROLE, USER_STATUS } from 'models/user'; export const UserDetail = ({ label, value }: { label: string; value: JSX.Element }) => { return ( @@ -64,7 +64,7 @@ export const UserDetails = () => { - {savedUser?.main_group}} /> + {savedUser?.main_role}} /> { setAddUserModalOpen(true)} disabled={ - savedUser?.main_group === USER_GROUP.VIEWER.label || + savedUser?.main_role === USER_COMPOSITE_ROLE.VIEWER.label || savedUser?.status_id === USER_STATUS.INACTIVE.value || savedUser?.id === userDetail?.user?.id } diff --git a/met-web/src/components/userManagement/userDetails/UserStatusButton.tsx b/met-web/src/components/userManagement/userDetails/UserStatusButton.tsx index 73390539b..9f92bcd99 100644 --- a/met-web/src/components/userManagement/userDetails/UserStatusButton.tsx +++ b/met-web/src/components/userManagement/userDetails/UserStatusButton.tsx @@ -6,7 +6,7 @@ import { openNotificationModal } from 'services/notificationModalService/notific import { openNotification } from 'services/notificationService/notificationSlice'; import { toggleUserStatus } from 'services/userService/api'; import { USER_ROLES } from 'services/userService/constants'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; const UserStatusButton = () => { const { roles, userDetail } = useAppSelector((state) => state.user); @@ -17,7 +17,7 @@ const UserStatusButton = () => { const isActive = savedUser?.status_id === 1; - const disabled = savedUser?.main_group === USER_GROUP.ADMIN.label || savedUser?.id === userDetail?.user?.id; + const disabled = savedUser?.main_role === USER_COMPOSITE_ROLE.ADMIN.label || savedUser?.id === userDetail?.user?.id; useEffect(() => { setUserStatus(isActive); diff --git a/met-web/src/models/user.ts b/met-web/src/models/user.ts index 53420ecde..9401b235c 100644 --- a/met-web/src/models/user.ts +++ b/met-web/src/models/user.ts @@ -1,6 +1,6 @@ -export type UserGroup = 'IT_ADMIN' | 'IT_VIEWER' | 'TEAM_MEMBER' | 'REVIEWER'; +export type UserCompositeRole = 'IT_ADMIN' | 'IT_VIEWER' | 'TEAM_MEMBER' | 'REVIEWER'; -export const USER_GROUP: { [x: string]: { value: UserGroup; label: string } } = { +export const USER_COMPOSITE_ROLE: { [x: string]: { value: UserCompositeRole; label: string } } = { ADMIN: { value: 'IT_ADMIN', label: 'Administrator', @@ -26,12 +26,12 @@ export interface User { email_address: string; external_id: string; first_name: string; - groups: string[]; + composite_roles: 
string[]; id: number; last_name: string; updated_date: string; roles: string[]; - main_group: string; + main_role: string; username: string; status_id: number; } @@ -53,13 +53,13 @@ export const createDefaultUser: User = { description: '', email_address: '', external_id: '', - groups: [''], + composite_roles: [''], first_name: '', last_name: '', updated_date: Date(), created_date: Date(), roles: [], username: '', - main_group: '', + main_role: '', status_id: 0, }; diff --git a/met-web/src/routes/AuthGate.tsx b/met-web/src/routes/AuthGate.tsx index c8be2ff32..2e333499f 100644 --- a/met-web/src/routes/AuthGate.tsx +++ b/met-web/src/routes/AuthGate.tsx @@ -1,7 +1,7 @@ import React from 'react'; import { useAppSelector } from 'hooks'; import { useLocation, Navigate, Outlet } from 'react-router-dom'; -import { USER_GROUP } from 'models/user'; +import { USER_COMPOSITE_ROLE } from 'models/user'; const AuthGate = ({ allowedRoles }: { allowedRoles: string[] }) => { const permissions = useAppSelector((state) => state.user.roles); @@ -13,7 +13,7 @@ const AuthGate = ({ allowedRoles }: { allowedRoles: string[] }) => { }); return permissions.some((permission) => scopesMap[permission]) || - permissions?.includes('/' + USER_GROUP.TEAM_MEMBER.value) ? ( + permissions?.includes('/' + USER_COMPOSITE_ROLE.TEAM_MEMBER.value) ? ( ) : ( diff --git a/met-web/src/services/userService/api/index.tsx b/met-web/src/services/userService/api/index.tsx index 8f1430352..685cbd028 100644 --- a/met-web/src/services/userService/api/index.tsx +++ b/met-web/src/services/userService/api/index.tsx @@ -11,7 +11,7 @@ interface GetUserListParams { sort_key?: string; sort_order?: 'asc' | 'desc'; search_text?: string; - // If yes, user groups will be fetched as well from keycloak + // If yes, user roles will be fetched as well from keycloak include_roles?: boolean; include_inactive?: boolean; } @@ -27,7 +27,7 @@ export const getUserList = async (params: GetUserListParams = {}): Promise => { @@ -39,23 +39,23 @@ export const getUser = async (params: GetUserParams): Promise => { return Promise.reject('Failed to fetch user details'); }; -interface AddUserToGroupProps { +interface AddUserToRoleProps { user_id?: string; - group?: string; + role?: string; } -export const addUserToGroup = async ({ user_id, group }: AddUserToGroupProps): Promise => { - const url = replaceUrl(Endpoints.User.ADD_TO_GROUP, 'user_id', String(user_id)); - const responseData = await http.PostRequest(url, {}, { group }); +export const addUserToRole = async ({ user_id, role }: AddUserToRoleProps): Promise => { + const url = replaceUrl(Endpoints.User.ADD_TO_COMPOSITE_ROLE, 'user_id', String(user_id)); + const responseData = await http.PostRequest(url, {}, { role }); return responseData.data; }; -interface ChangeUserGroupProps { +interface ChangeUserRoleProps { user_id: number; - group: string; + role: string; } -export const changeUserGroup = async ({ user_id, group }: ChangeUserGroupProps): Promise => { - const url = replaceUrl(Endpoints.User.CHANGE_GROUP, 'user_id', String(user_id)); - const responseData = await http.PutRequest(url, {}, { group }); +export const changeUserRole = async ({ user_id, role }: ChangeUserRoleProps): Promise => { + const url = replaceUrl(Endpoints.User.CHANGE_COMPOSITE_ROLE, 'user_id', String(user_id)); + const responseData = await http.PutRequest(url, {}, { role }); return responseData.data; }; diff --git a/met-web/src/services/userService/types.ts b/met-web/src/services/userService/types.ts index b2bfde1b1..4f273e93c 100644 --- 
a/met-web/src/services/userService/types.ts +++ b/met-web/src/services/userService/types.ts @@ -5,7 +5,7 @@ export interface UserDetail { email_verified?: boolean; preferred_username?: string; user?: User; - groups?: string[]; + composite_roles?: string[]; } export interface UserAuthentication { diff --git a/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx b/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx index 8316feef2..0c25ef950 100644 --- a/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx +++ b/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx @@ -12,7 +12,7 @@ import * as teamMemberService from 'services/membershipService'; import * as widgetService from 'services/widgetService'; import { Box } from '@mui/material'; import { draftEngagement, engagementMetadata, engagementSetting } from '../factory'; -import { createDefaultUser, USER_GROUP } from 'models/user'; +import { createDefaultUser, USER_COMPOSITE_ROLE } from 'models/user'; import { EngagementTeamMember, initialDefaultTeamMember } from 'models/engagementTeamMember'; import { USER_ROLES } from 'services/userService/constants'; @@ -24,7 +24,7 @@ const mockTeamMember1: EngagementTeamMember = { id: 1, first_name: 'Jane', last_name: 'Doe', - groups: [USER_GROUP.VIEWER.label], + composite_roles: [USER_COMPOSITE_ROLE.VIEWER.label], }, }; From 75687cd161ebdf4fd7a74fc68b32d18e3882d915 Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Mon, 19 Feb 2024 14:18:25 -0800 Subject: [PATCH 07/42] Implemented floating save/preview buttons for engagement editing (#2388) --- CHANGELOG.MD | 4 + .../AdditionalTabContent.tsx | 39 +++- .../EngagementFormTabs/EngagementForm.tsx | 166 ++++-------------- .../EngagementTabsContext.tsx | 84 ++++++++- .../Settings/EngagementSettingsForm.tsx | 38 +++- .../EngagementUserManagement.tsx | 37 +++- .../create/EngagementForm.Create.test.tsx | 4 +- .../edit/EngagementForm.Edit.One.test.tsx | 4 +- 8 files changed, 233 insertions(+), 143 deletions(-) diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 98813ea77..7bec09684 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,7 @@ +## February 16, 2024 +- **Task**Make a floating save/preview bar when editing engagements [DESENG-498](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-498) + - Implemented a floating behavior for the save/preview buttons during engagement editing. This feature persists across all tabs but exclusively saves data for the Engagement Content tab. + ## February 15, 2024 - **Task**Restore role assignment functionality to MET with the CSS API [DESENG-473](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-473) - Utilize the CSS API for efficient management of composite roles. This involves the assignment, reassignment, or removal of users from the composite roles of TEAM_MEMBER, REVIEWER, IT_ADMIN, or IT_VIEWER. 
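The floating save/preview behaviour described in the changelog entry above amounts to the same Save/Preview button pair being rendered at the bottom of every engagement tab, wired to handlers shared through context (which is what the diffs below add tab by tab). Below is a minimal sketch of that pattern. It assumes only the names that appear in this patch (handleSaveEngagement, handlePreviewEngagement, isSaving, PrimaryButton, SecondaryButton, the save-engagement-button test id); the extracted FloatingSaveBar component, its file location, and the sx positioning values are illustrative rather than part of the actual change.

// FloatingSaveBar.tsx -- hypothetical extraction of the button pair that the patch repeats inside each tab.
import React, { useContext } from 'react';
import { Box } from '@mui/material';
import { PrimaryButton, SecondaryButton } from 'components/common';
import { ActionContext } from '../ActionContext';
import { EngagementTabsContext } from './EngagementTabsContext';

const FloatingSaveBar = () => {
    // isSaving comes from ActionContext; the save/preview handlers live in EngagementTabsContext
    // so that every tab persists the same Engagement Content data.
    const { isSaving } = useContext(ActionContext);
    const { handleSaveEngagement, handlePreviewEngagement } = useContext(EngagementTabsContext);

    return (
        // Fixed positioning keeps the bar visible while scrolling any tab (values are illustrative).
        <Box sx={{ position: 'fixed', bottom: 24, right: 24, zIndex: 1000, display: 'flex', gap: 1 }}>
            <PrimaryButton
                data-testid="save-engagement-button"
                loading={isSaving}
                onClick={() => handleSaveEngagement()}
            >
                Save
            </PrimaryButton>
            <SecondaryButton disabled={isSaving} onClick={() => handlePreviewEngagement()}>
                Preview
            </SecondaryButton>
        </Box>
    );
};

export default FloatingSaveBar;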
diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/AdditionalDetails/AdditionalTabContent.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/AdditionalDetails/AdditionalTabContent.tsx index 344b13dcf..f683bf4fa 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/AdditionalDetails/AdditionalTabContent.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/AdditionalDetails/AdditionalTabContent.tsx @@ -1,13 +1,16 @@ import React, { useContext } from 'react'; -import { Divider, Grid } from '@mui/material'; -import { MetPaper, PrimaryButton } from 'components/common'; +import { Divider, Grid, Box } from '@mui/material'; +import { MetPaper, PrimaryButton, SecondaryButton } from 'components/common'; import ConsentMessage from './ConsentMessage'; import EngagementInformation from './EngagementInformation'; - +import { EngagementTabsContext } from '../EngagementTabsContext'; +import { ActionContext } from '../../ActionContext'; import { AdditionalDetailsContext } from './AdditionalDetailsContext'; const AdditionalTabContent = () => { const { handleSaveAdditional, updatingAdditional } = useContext(AdditionalDetailsContext); + const { isSaving } = useContext(ActionContext); + const { handleSaveEngagement, handlePreviewEngagement } = useContext(EngagementTabsContext); return ( @@ -33,6 +36,36 @@ const AdditionalTabContent = () => { Save + + + handleSaveEngagement()} + loading={isSaving} + > + Save + + handlePreviewEngagement()} + disabled={isSaving} + > + {'Preview'} + + + ); diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/EngagementForm.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/EngagementForm.tsx index 899a81df5..6026740e1 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/EngagementForm.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/EngagementForm.tsx @@ -3,11 +3,9 @@ import { Typography, Grid, TextField, Stack, Box } from '@mui/material'; import { MetPaper, MetLabel, PrimaryButton, SecondaryButton, MetDescription } from '../../../common'; import { ActionContext } from '../ActionContext'; import ImageUpload from 'components/imageUpload'; -import { useNavigate } from 'react-router-dom'; import { SurveyBlock } from './SurveyBlock'; -import { If, Then, Else } from 'react-if'; import { EngagementTabsContext } from './EngagementTabsContext'; -import { EngagementStatus, SUBMISSION_STATUS } from 'constants/engagementStatus'; +import { EngagementStatus } from 'constants/engagementStatus'; import DayCalculatorModal from '../DayCalculator'; import { ENGAGEMENT_CROPPER_ASPECT_RATIO, ENGAGEMENT_UPLOADER_HEIGHT } from './constants'; import RichTextEditor from 'components/common/RichTextEditor'; @@ -15,25 +13,20 @@ import { getTextFromDraftJsContentState } from 'components/common/RichTextEditor const CREATE = 'create'; const EngagementForm = () => { - const { - handleCreateEngagementRequest, - handleUpdateEngagementRequest, - isSaving, - savedEngagement, - handleAddBannerImage, - fetchEngagement, - } = useContext(ActionContext); + const { isSaving, savedEngagement, handleAddBannerImage } = useContext(ActionContext); const { engagementFormData, setEngagementFormData, + setIsNewEngagement, + handleSaveEngagement, + handlePreviewEngagement, richDescription, setRichDescription, richContent, setRichContent, engagementFormError, setEngagementFormError, - surveyBlockText, } = useContext(EngagementTabsContext); const [initialRichDescription, setInitialRichDescription] = 
useState(''); @@ -42,32 +35,16 @@ const EngagementForm = () => { const [isOpen, setIsOpen] = useState(false); - const navigate = useNavigate(); - - const isNewEngagement = window.location.pathname.includes(CREATE); - - const { name, start_date, end_date, description } = engagementFormData; + const isCreateEngagement = window.location.pathname.includes(CREATE); - const surveyBlockList = [ - { - survey_status: SUBMISSION_STATUS.UPCOMING, - block_text: surveyBlockText.Upcoming, - }, - { - survey_status: SUBMISSION_STATUS.OPEN, - block_text: surveyBlockText.Open, - }, - { - survey_status: SUBMISSION_STATUS.CLOSED, - block_text: surveyBlockText.Closed, - }, - ]; + const { name, start_date, end_date } = engagementFormData; useEffect(() => { const initialDescription = getTextFromDraftJsContentState(richDescription || savedEngagement.rich_description); setInitialRichDescription(richDescription || savedEngagement.rich_description); setInitialRichContent(richContent || savedEngagement.rich_content); setDescriptionCharCount(initialDescription.length); + setIsNewEngagement(isCreateEngagement); }, []); const getErrorMessage = () => { @@ -119,72 +96,6 @@ const EngagementForm = () => { setRichContent(newState); }; - const validateForm = () => { - const errors = { - name: !(name && name.length < 50), - start_date: !start_date, - end_date: !end_date, - description: description.length > 550, - }; - setEngagementFormError(errors); - - return Object.values(errors).some((isError: unknown) => isError); - }; - - const handleCreateEngagement = async () => { - const hasErrors = validateForm(); - - if (hasErrors) { - return; - } - - const engagement = await handleCreateEngagementRequest({ - ...engagementFormData, - rich_description: richDescription, - rich_content: richContent, - status_block: surveyBlockList, - }); - - navigate(`/engagements/${engagement.id}/form`); - - return engagement; - }; - - const handleUpdateEngagement = async () => { - const hasErrors = validateForm(); - - if (hasErrors) { - return; - } - - await handleUpdateEngagementRequest({ - ...engagementFormData, - rich_description: richDescription, - rich_content: richContent, - status_block: surveyBlockList, - }); - - fetchEngagement(); - return savedEngagement; - }; - - const handleSaveEngagement = () => { - if (isNewEngagement) { - return handleCreateEngagement(); - } - - return handleUpdateEngagement(); - }; - - const handlePreviewEngagement = async () => { - const engagement = await handleSaveEngagement(); - if (!engagement) { - return; - } - - navigate(`/engagements/${engagement.id}/view`); - }; - const isDateFieldDisabled = [EngagementStatus.Closed, EngagementStatus.Unpublished].includes( savedEngagement.status_id, ); @@ -333,38 +244,35 @@ const EngagementForm = () => { - - - - handleCreateEngagement()} - loading={isSaving} - > - Save - - - - handleUpdateEngagement()} - disabled={isSaving} - loading={isSaving} - > - Save - - - - handlePreviewEngagement()} - disabled={isSaving} - > - {'Preview'} - - + + + handleSaveEngagement()} + loading={isSaving} + > + Save + + handlePreviewEngagement()} + disabled={isSaving} + > + {'Preview'} + + + ); diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/EngagementTabsContext.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/EngagementTabsContext.tsx index 6b77d9c68..b628758fb 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/EngagementTabsContext.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/EngagementTabsContext.tsx @@ -1,4 +1,5 @@ 
import React, { createContext, useContext, useEffect, useState } from 'react'; +import { useNavigate } from 'react-router-dom'; import { SubmissionStatusTypes, SUBMISSION_STATUS } from 'constants/engagementStatus'; import { User } from 'models/user'; import { ActionContext } from '../ActionContext'; @@ -14,6 +15,7 @@ import { getEngagementSettings, patchEngagementSettings, } from 'services/engagementSettingService'; +import { EngagementForm } from '../types'; interface EngagementFormData { name: string; @@ -56,6 +58,10 @@ const initialFormError = { export interface EngagementTabsContextState { engagementFormData: EngagementFormData; setEngagementFormData: React.Dispatch>; + isNewEngagement: boolean; + setIsNewEngagement: React.Dispatch>; + handleSaveEngagement: () => Promise; + handlePreviewEngagement: () => Promise; richDescription: string; setRichDescription: React.Dispatch>; richContent: string; @@ -84,6 +90,16 @@ export const EngagementTabsContext = createContext({ setEngagementFormData: () => { throw new Error('setEngagementFormData is unimplemented'); }, + isNewEngagement: false, + setIsNewEngagement: () => { + throw new Error('setIsNewEngagement is unimplemented'); + }, + handleSaveEngagement: async () => { + console.warn('handleSaveEngagement is unimplemented'); + }, + handlePreviewEngagement: async () => { + console.warn('handlePreviewEngagement is unimplemented'); + }, richDescription: '', setRichDescription: () => { throw new Error('setRichDescription is unimplemented'); @@ -132,7 +148,7 @@ export const EngagementTabsContext = createContext({ }); export const EngagementTabsContextProvider = ({ children }: { children: React.ReactNode }) => { - const { savedEngagement } = useContext(ActionContext); + const { handleCreateEngagementRequest, handleUpdateEngagementRequest, savedEngagement } = useContext(ActionContext); const dispatch = useAppDispatch(); const [engagementFormData, setEngagementFormData] = useState({ name: savedEngagement.name || '', @@ -143,6 +159,7 @@ export const EngagementTabsContextProvider = ({ children }: { children: React.Re is_internal: savedEngagement.is_internal || false, consent_message: savedEngagement.consent_message || '', }); + const [isNewEngagement, setIsNewEngagement] = useState(false); const [richDescription, setRichDescription] = useState(savedEngagement?.rich_description || ''); const [richContent, setRichContent] = useState(savedEngagement?.rich_content || ''); const [engagementFormError, setEngagementFormError] = useState(initialFormError); @@ -261,6 +278,67 @@ export const EngagementTabsContextProvider = ({ children }: { children: React.Re } }; + const { name, start_date, end_date, description } = engagementFormData; + const surveyBlockList = [ + { + survey_status: SUBMISSION_STATUS.UPCOMING, + block_text: surveyBlockText.Upcoming, + }, + { + survey_status: SUBMISSION_STATUS.OPEN, + block_text: surveyBlockText.Open, + }, + { + survey_status: SUBMISSION_STATUS.CLOSED, + block_text: surveyBlockText.Closed, + }, + ]; + const validateForm = () => { + const errors = { + name: !(name && name.length < 50), + start_date: !start_date, + end_date: !end_date, + description: description.length > 550, + }; + setEngagementFormError(errors); + + return Object.values(errors).some((isError: unknown) => isError); + }; + + const handleSaveEngagement = async () => { + const hasErrors = validateForm(); + + if (hasErrors) { + return; + } + + const engagement = isNewEngagement + ? 
await handleCreateEngagementRequest({ + ...engagementFormData, + rich_description: richDescription, + rich_content: richContent, + status_block: surveyBlockList, + }) + : await handleUpdateEngagementRequest({ + ...engagementFormData, + rich_description: richDescription, + rich_content: richContent, + status_block: surveyBlockList, + }); + + return engagement; + }; + + const navigate = useNavigate(); + const handlePreviewEngagement = async () => { + const engagement = await handleSaveEngagement(); + if (!engagement) { + return; + } + + navigate(`/engagements/${engagement.id}/view`); + }; + useEffect(() => { handleGetSlug(); }, [savedEngagement.id]); @@ -270,6 +348,10 @@ export const EngagementTabsContextProvider = ({ children }: { children: React.Re value={{ engagementFormData, setEngagementFormData, + isNewEngagement, + setIsNewEngagement, + handleSaveEngagement, + handlePreviewEngagement, richDescription, setRichDescription, richContent, diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/Settings/EngagementSettingsForm.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/Settings/EngagementSettingsForm.tsx index ae6fc4dff..0f5d11155 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/Settings/EngagementSettingsForm.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/Settings/EngagementSettingsForm.tsx @@ -1,13 +1,17 @@ import React, { useContext } from 'react'; -import { Divider, Grid } from '@mui/material'; -import { MetPaper, PrimaryButton } from 'components/common'; +import { Divider, Grid, Box } from '@mui/material'; +import { MetPaper, PrimaryButton, SecondaryButton } from 'components/common'; import InternalEngagement from './InternalEngagement'; import SendReport from './SendReport'; import { EngagementSettingsContext } from './EngagementSettingsContext'; import { PublicUrls } from './PublicUrls'; +import { EngagementTabsContext } from '../EngagementTabsContext'; +import { ActionContext } from '../../ActionContext'; const EngagementSettingsForm = () => { const { handleSaveSettings, updatingSettings } = useContext(EngagementSettingsContext); + const { isSaving } = useContext(ActionContext); + const { handleSaveEngagement, handlePreviewEngagement } = useContext(EngagementTabsContext); return ( @@ -39,6 +43,36 @@ const EngagementSettingsForm = () => { + + + handleSaveEngagement()} + loading={isSaving} + > + Save + + handlePreviewEngagement()} + disabled={isSaving} + > + {'Preview'} + + + ); diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/EngagementUserManagement.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/EngagementUserManagement.tsx index 471915fa1..d6598a2c3 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/EngagementUserManagement.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/UserManagement/EngagementUserManagement.tsx @@ -1,14 +1,14 @@ import React, { useContext } from 'react'; -import { Grid } from '@mui/material'; -import { MetLabel, MetPaper, PrimaryButton, MetParagraph } from 'components/common'; +import { Grid, Box } from '@mui/material'; +import { MetLabel, MetPaper, PrimaryButton, SecondaryButton, MetParagraph } from 'components/common'; import { ActionContext } from '../../ActionContext'; import { EngagementTabsContext } from '../EngagementTabsContext'; import { formatDate } from 'components/common/dateHelper'; import TeamMemberListing from './TeamMemberListing'; const 
EngagementUserManagement = () => { - const { savedEngagement } = useContext(ActionContext); - const { setAddTeamMemberOpen } = useContext(EngagementTabsContext); + const { isSaving, savedEngagement } = useContext(ActionContext); + const { handleSaveEngagement, handlePreviewEngagement, setAddTeamMemberOpen } = useContext(EngagementTabsContext); return ( @@ -46,6 +46,35 @@ const EngagementUserManagement = () => { + + + handleSaveEngagement()} + loading={isSaving} + > + Save + + handlePreviewEngagement()} + disabled={isSaving} + > + {'Preview'} + + + diff --git a/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx b/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx index 843e88b1a..7a342bc29 100644 --- a/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx +++ b/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx @@ -98,7 +98,7 @@ describe('Engagement form page tests', () => { expect(getByText('Engagement Name')).toBeInTheDocument(); expect(container.querySelector('span.MuiSkeleton-root')).toBeNull(); }); - expect(screen.getByTestId('create-engagement-button')).toBeVisible(); + expect(screen.getByTestId('save-engagement-button')).toBeVisible(); expect(getEngagementMock).not.toHaveBeenCalled(); expect(getEngagementMetadataMock).not.toHaveBeenCalled(); @@ -125,7 +125,7 @@ describe('Engagement form page tests', () => { useParamsMock.mockReturnValue({ engagementId: 'create' }); const { container, getByTestId } = render(); - const createButton = getByTestId('create-engagement-button'); + const createButton = getByTestId('save-engagement-button'); fireEvent.click(createButton); expect(container.querySelectorAll('.Mui-error').length).toBeGreaterThan(0); diff --git a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx index fde69dd1b..7388349b9 100644 --- a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx +++ b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx @@ -135,7 +135,7 @@ describe('Engagement form page tests', () => { expect(getEngagementMock).toHaveBeenCalledOnce(); expect(getEngagementMetadataMock).toHaveBeenCalledOnce(); - expect(screen.getByTestId('update-engagement-button')).toBeVisible(); + expect(screen.getByTestId('save-engagement-button')).toBeVisible(); expect(screen.getByDisplayValue('2022-09-01')).toBeInTheDocument(); expect(screen.getByDisplayValue('2022-09-30')).toBeInTheDocument(); expect(screen.getByText('Survey 1')).toBeInTheDocument(); @@ -148,7 +148,7 @@ describe('Engagement form page tests', () => { await waitFor(() => { expect(screen.getByDisplayValue('Test Engagement')).toBeInTheDocument(); }); - const updateButton = screen.getByTestId('update-engagement-button'); + const updateButton = screen.getByTestId('save-engagement-button'); const nameInput = container.querySelector('input[name="name"]'); assert(nameInput, 'Unable to find engagement name input'); From c5f9a930e44f7af3535ff6f05a279e1a4954a929 Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Tue, 20 Feb 2024 11:23:51 -0800 Subject: [PATCH 08/42] DESENG-502: Upgrading flask version (#2389) * Upgrading flask version * Updating change log --- CHANGELOG.MD | 4 ++++ met-api/requirements.txt | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git 
a/CHANGELOG.MD b/CHANGELOG.MD
index 7bec09684..75d567aee 100644
--- a/CHANGELOG.MD
+++ b/CHANGELOG.MD
@@ -1,3 +1,7 @@
+## February 20, 2024
+- **Task**Upgrade the version of flask [DESENG-502](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-502)
+  - Performed a Flask version upgrade to version 2.2.5.
+
 ## February 16, 2024
 - **Task**Make a floating save/preview bar when editing engagements [DESENG-498](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-498)
   - Implemented a floating behavior for the save/preview buttons during engagement editing. This feature persists across all tabs but exclusively saves data for the Engagement Content tab.
diff --git a/met-api/requirements.txt b/met-api/requirements.txt
index ebde07c19..d11227820 100644
--- a/met-api/requirements.txt
+++ b/met-api/requirements.txt
@@ -4,7 +4,7 @@ apispec==4.5.0
 attrs==21.2.0
 click==8.0.1
 flasgger==0.9.5
-Flask==2.0.1
+Flask==2.2.5
 flask-marshmallow==0.14.0
 Flask-Migrate==2.7.0
 Flask-Script==2.0.5

From 85599f246689d60dba45ceddfd23a90da97c01a6 Mon Sep 17 00:00:00 2001
From: Ratheesh kumar R <108045773+ratheesh-aot@users.noreply.github.com>
Date: Mon, 26 Feb 2024 13:52:14 -0800
Subject: [PATCH 09/42] DESENG-483 - Adding missing unit test (#2394)

* [TO FEATURE] DESENG-483: Adding Poll Widget Unit test (#2384)
* DESENG-483: Adding Poll Widget Unit test
* Updated function name
* DESENG-483: Added TimeLine widget and VideoWidget test
* DESENG-483: Fixing review comments Moving common jest mock and spy to another file
* DESENG-483: Fixed Factory
* DESENG-483: Updated Poll widget test to use common mocks
* DESENG-483: Unit test for Landing page and Subscribe widget
* DESENG-483: Added Unit test for Public Dashboard
* DESENG-483: Test cases for FormCAC and Poll Widget View
* DESENG-483: Timeline widget import fix
* Added changelog
---
 CHANGELOG.MD                                  |   4 +
 met-web/__mocks__/fileMock.ts                 |   1 +
 met-web/jest.config.ts                        |   1 +
 .../form/EngagementWidgets/Poll/Form.tsx      |   5 +-
 .../Poll/PolllAnswerItemForm.tsx              |   1 +
 .../unit/components/FormCAC/FormCAC.test.tsx  | 105 ++++++++++++
 met-web/tests/unit/components/factory.ts      |  96 +++++++++++
 .../landingPage/LandingPage.test.tsx          | 157 ++++++++++++++++++
 .../publicDashboard/ProjectLocation.test.tsx  |  88 ++++++++++
 .../publicDashboard/PublicDashboard.test.tsx  |  84 ++++++++++
 .../publicDashboard/SubmissionTrend.test.tsx  |  67 ++++++++
 .../publicDashboard/SurveyEmailsSent.test.tsx |  48 ++++++
 .../publicDashboard/SurveysCompleted.test.tsx |  56 +++++++
 .../components/widgets/PollWidget.test.tsx    | 137 +++++++++++++++
 .../widgets/PollWidgetView.test.tsx           | 126 ++++++++++++++
 .../widgets/SubscribeWidget.test.tsx          | 127 ++++++++++++++
 .../widgets/TimeLineWidget.test.tsx           | 117 +++++++++++++
 .../components/widgets/VideoWidget.test.tsx   | 103 ++++++++++++
 .../components/widgets/setupWidgetTestEnv.tsx |  61 +++++++
 19 files changed, 1383 insertions(+), 1 deletion(-)
 create mode 100644 met-web/__mocks__/fileMock.ts
 create mode 100644 met-web/tests/unit/components/FormCAC/FormCAC.test.tsx
 create mode 100644 met-web/tests/unit/components/landingPage/LandingPage.test.tsx
 create mode 100644 met-web/tests/unit/components/publicDashboard/ProjectLocation.test.tsx
 create mode 100644 met-web/tests/unit/components/publicDashboard/PublicDashboard.test.tsx
 create mode 100644 met-web/tests/unit/components/publicDashboard/SubmissionTrend.test.tsx
 create mode 100644 met-web/tests/unit/components/publicDashboard/SurveyEmailsSent.test.tsx
 create mode 100644 met-web/tests/unit/components/publicDashboard/SurveysCompleted.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/PollWidget.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/PollWidgetView.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/SubscribeWidget.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/TimeLineWidget.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/VideoWidget.test.tsx
 create mode 100644 met-web/tests/unit/components/widgets/setupWidgetTestEnv.tsx

diff --git a/CHANGELOG.MD b/CHANGELOG.MD
index 75d567aee..331c4a498 100644
--- a/CHANGELOG.MD
+++ b/CHANGELOG.MD
@@ -1,3 +1,7 @@
+## February 26, 2024
+- **Task**Adding missing unit test [DESENG-483](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-483)
+  - Added missing unit test for components.
+
 ## February 20, 2024
 - **Task**Upgrade the version of flask [DESENG-502](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-502)
   - Performed a Flask version upgrade to version 2.2.5.
diff --git a/met-web/__mocks__/fileMock.ts b/met-web/__mocks__/fileMock.ts
new file mode 100644
index 000000000..da8215b36
--- /dev/null
+++ b/met-web/__mocks__/fileMock.ts
@@ -0,0 +1 @@
+export default 'mock-file-stub';
diff --git a/met-web/jest.config.ts b/met-web/jest.config.ts
index abfab92cd..aa3653754 100644
--- a/met-web/jest.config.ts
+++ b/met-web/jest.config.ts
@@ -82,6 +82,7 @@ const config: Config.InitialOptions = {
         'react-dnd-html5-backend': 'react-dnd-html5-backend-cjs',
         'dnd-core': 'dnd-core-cjs',
         '\\.(css|scss)$': '/tests/unit/components/styleMock.tsx',
+        "\\.(jpg|jpeg|png|gif|webp|svg)$": "/__mocks__/fileMock.ts",
     },
 
     // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
diff --git a/met-web/src/components/engagement/form/EngagementWidgets/Poll/Form.tsx b/met-web/src/components/engagement/form/EngagementWidgets/Poll/Form.tsx
index df4e41de4..dfacc7099 100644
--- a/met-web/src/components/engagement/form/EngagementWidgets/Poll/Form.tsx
+++ b/met-web/src/components/engagement/form/EngagementWidgets/Poll/Form.tsx
@@ -163,6 +163,7 @@ const Form = () => {
 The title must be less than 255 characters.
: } + Allow multiple values + + } + /> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +}; + +export default TaxonEditForm; diff --git a/met-web/src/components/metadataManagement/TaxonEditor.tsx b/met-web/src/components/metadataManagement/TaxonEditor.tsx new file mode 100644 index 000000000..759d3d563 --- /dev/null +++ b/met-web/src/components/metadataManagement/TaxonEditor.tsx @@ -0,0 +1,351 @@ +import { Grid, Box, Paper, IconButton, Modal, Button, Typography, Chip } from '@mui/material'; +import { useTheme } from '@mui/material/styles'; +import useMediaQuery from '@mui/material/useMediaQuery'; +import { Close, UnfoldMore, UnfoldLess, AddCircle, KeyboardArrowDown, KeyboardArrowUp } from '@mui/icons-material'; +import { DragDropContext, DropResult } from '@hello-pangea/dnd'; +import React, { useContext, useEffect, useMemo, useRef, useState } from 'react'; +import { reorder } from 'utils'; +import { MetadataTaxon } from 'models/engagement'; +import { MetDroppable } from 'components/common/Dragdrop'; +import { ActionContext } from './ActionContext'; +import TaxonEditForm from './TaxonEditForm'; +import { Else, If, Then } from 'react-if'; +import { TaxonCard, TaxonCardSkeleton } from './TaxonCard'; + +export const TaxonEditor = () => { + const theme = useTheme(); + const isSmallScreen = useMediaQuery(theme.breakpoints.down('lg')); + const { metadataTaxa, reorderMetadataTaxa, createMetadataTaxon, selectedTaxon, setSelectedTaxonId, isLoading } = + useContext(ActionContext); + const orderedMetadataTaxa = useMemo(() => metadataTaxa, [metadataTaxa]); + const [expandedCards, setExpandedCards] = useState(new Array(metadataTaxa.length).fill(false)); + + const setCardExpanded = (index: number, state: boolean) => { + setExpandedCards((prevExpandedCards) => { + const newExpandedCards = [...prevExpandedCards]; // create a copy + newExpandedCards[index] = state; // update the copy + return newExpandedCards; // return the updated copy + }); + }; + + const expandAll = () => { + setExpandedCards(new Array(metadataTaxa.length).fill(true)); + }; + + const collapseAll = () => { + setExpandedCards(new Array(metadataTaxa.length).fill(false)); + setSelectedTaxonId(-1); + }; + + const repositionTaxon = (result: DropResult) => { + if (!result.destination) { + return; + } + const items = reorder(metadataTaxa, result.source.index, result.destination.index); + reorderMetadataTaxa(items.map((taxon) => taxon.id)); + }; + + const handleSelectTaxon = (taxon: MetadataTaxon) => { + if (taxon.id === selectedTaxon?.id) { + setSelectedTaxonId(-1); + } else { + setSelectedTaxonId(taxon.id); + } + }; + + const handleExpandTaxon = (taxon: MetadataTaxon) => { + const index = orderedMetadataTaxa.findIndex((t) => t.id === taxon.id); + if (index === -1) { + return; + } + setCardExpanded(index, !expandedCards[index]); + }; + + const addTaxon = async () => { + const newTaxon = await createMetadataTaxon({ + name: 'New Taxon', + data_type: 'text', + freeform: true, + one_per_engagement: true, + }); + if (newTaxon) { + setSelectedTaxonId(newTaxon.id); + } + setTimeout(() => { + scrollableRef.current?.scrollTo({ top: scrollableRef.current?.scrollHeight, behavior: 'smooth' }); + }, 1); // Wait for the new taxon to be rendered before scrolling + }; + + const [showScrollIndicators, setShowScrollIndicators] = useState({ + top: false, + bottom: true, + }); + + const scrollableRef = useRef(null); + + useEffect(() => { + if (!scrollableRef.current) { + return; + } + const currentRef = scrollableRef.current; + const checkScroll = () 
=> { + if (!currentRef) { + return; + } + const { scrollTop, scrollHeight, clientHeight } = scrollableRef.current; + const scrollMargin = 20; + setShowScrollIndicators({ + top: scrollTop > scrollMargin, + bottom: scrollTop < scrollHeight - clientHeight - scrollMargin, + }); + }; + + currentRef.addEventListener('scroll', checkScroll); + + // Initial check + checkScroll(); + + return () => currentRef.removeEventListener('scroll', checkScroll); + }, [orderedMetadataTaxa]); + + const scroll = (amount: number) => { + const scrollableDiv = scrollableRef.current; + if (scrollableDiv) { + scrollableDiv.scrollBy({ top: amount, behavior: 'smooth' }); + } + }; + + return ( + + + + Manage the ways metadata is collected and organized for your engagements. + + + + + + + + + } + onClick={() => { + scroll(-400); + }} + /> + + + + + + {!isLoading && + orderedMetadataTaxa.map((taxon: MetadataTaxon, index) => { + return ( + + ); + })} + {isLoading && [...Array(9)].map(() => )} + {!isLoading && orderedMetadataTaxa.length === 0 && ( + <> + + No taxa found + + + Add a new taxon above to get started. + + + )} + + + + + + } + onClick={() => { + scroll(400); + }} + /> + + + + + {selectedTaxon && ( + setSelectedTaxonId(-1)}> + + setSelectedTaxonId(-1)} + sx={{ + position: 'relative', + left: '-1em', + top: '0.3em', + }} + > + + + + + + )} + + + {selectedTaxon && ( + + + + + + )} + + + + + ); +}; diff --git a/met-web/src/components/metadataManagement/TaxonTypes.tsx b/met-web/src/components/metadataManagement/TaxonTypes.tsx new file mode 100644 index 000000000..a4df174e8 --- /dev/null +++ b/met-web/src/components/metadataManagement/TaxonTypes.tsx @@ -0,0 +1,135 @@ +import { + AlternateEmail, + Event, + EventNote, + Flaky, + Link, + Article, + ChatBubbleOutline, + PinOutlined, + Phone, + Schedule, +} from '@mui/icons-material'; +import { TaxonType, GenericInputProps as TaxonInputProps } from './types'; +import * as yup from 'yup'; +import React from 'react'; +import { TextField } from '@mui/material'; +import { + TaxonPicker, + PickerTypes, + taxonSwitch, +} from 'components/engagement/form/EngagementFormTabs/AdditionalDetails/Metadata/TaxonInputComponents'; + +export const TaxonTypes: { [key: string]: TaxonType } = { + text: { + name: 'Text', + icon: ChatBubbleOutline, + supportsPresetValues: true, + supportsFreeform: true, + supportsMulti: true, + yupValidator: yup.string(), + }, + long_text: { + name: 'Multiline Text', + icon: Article, + supportsPresetValues: false, + supportsFreeform: true, + supportsMulti: false, + yupValidator: yup.string(), + customInput: ({ taxon, field, setValue, errors }: TaxonInputProps) => ( + { + setValue(taxon.id.toString(), e.target.value); + }} + /> + ), + }, + number: { + name: 'Number', + icon: PinOutlined, + supportsPresetValues: true, + supportsFreeform: true, + supportsMulti: true, + yupValidator: yup.number().typeError('This value must be a number.'), + }, + boolean: { + name: 'True/False', + icon: Flaky, + supportsPresetValues: false, + supportsFreeform: false, + supportsMulti: false, + yupValidator: yup.boolean(), + customInput: taxonSwitch, + }, + date: { + name: 'Date', + icon: Event, + supportsPresetValues: false, + supportsFreeform: false, + supportsMulti: false, + yupValidator: yup.date().typeError('This value must be a valid date.'), + customInput: ({ ...props }: TaxonInputProps) => TaxonPicker({ ...props, pickerType: PickerTypes.DATE }), + }, + time: { + name: 'Time', + icon: Schedule, + supportsPresetValues: false, + supportsFreeform: false, + supportsMulti: false, + 
yupValidator: yup.date().typeError('This value must be a valid time.'), + customInput: ({ ...props }: TaxonInputProps) => TaxonPicker({ ...props, pickerType: PickerTypes.TIME }), + }, + datetime: { + name: 'Date and Time', + icon: EventNote, + supportsPresetValues: false, + supportsFreeform: false, + supportsMulti: false, + yupValidator: yup.date().typeError('This value must consist of a valid date and time.'), + customInput: ({ ...props }: TaxonInputProps) => TaxonPicker({ ...props, pickerType: PickerTypes.DATETIME }), + }, + url: { + name: 'Web Link', + icon: Link, + supportsPresetValues: true, + supportsFreeform: true, + supportsMulti: true, + yupValidator: yup.string().url('This value must be a valid web URL.'), + externalResource: (value: string) => value, + externalResourceLabel: 'Open', + }, + email: { + name: 'Email Address', + icon: AlternateEmail, + supportsPresetValues: true, + supportsFreeform: true, + supportsMulti: true, + yupValidator: yup.string().email('This value must be a valid email address.'), + externalResource: (value: string) => `mailto:${value}`, + externalResourceLabel: 'Email', + }, + phone: { + name: 'Phone Number', + icon: Phone, + supportsPresetValues: true, + supportsFreeform: true, + supportsMulti: true, + yupValidator: yup + .string() + .matches( + /^(\+?\d{1,3}[\s-]?)?\(?\d{3}\)?[\s-]?\d{3}[\s-]?\d{4}$/, + 'This value must be a valid phone number.', + ), + externalResource: (value: string) => `tel:${value}`, + externalResourceLabel: 'Call', + }, +}; diff --git a/met-web/src/components/metadataManagement/index.tsx b/met-web/src/components/metadataManagement/index.tsx new file mode 100644 index 000000000..c0bb959c9 --- /dev/null +++ b/met-web/src/components/metadataManagement/index.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import ActionProvider from './ActionContext'; +import { TaxonEditor } from './TaxonEditor'; + +const MetadataManagement = () => { + return ( + + + + ); +}; + +export default MetadataManagement; diff --git a/met-web/src/components/metadataManagement/presetFieldsEditor/PresetValuesEditor.tsx b/met-web/src/components/metadataManagement/presetFieldsEditor/PresetValuesEditor.tsx new file mode 100644 index 000000000..7a3c7cb34 --- /dev/null +++ b/met-web/src/components/metadataManagement/presetFieldsEditor/PresetValuesEditor.tsx @@ -0,0 +1,106 @@ +import React, { SyntheticEvent, useState } from 'react'; +import { Autocomplete, TextField, Chip, IconButton, Stack } from '@mui/material'; +import { Control, Controller, FieldError } from 'react-hook-form'; +import { ArrowCircleUp, HighlightOff } from '@mui/icons-material'; + +const PresetValuesEditor = ({ + control, // The control object (from react-hook-form) + name, // The name of the field in the form +}: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + control: Control; + name: string; +}) => { + // State to manage the input value of the Autocomplete component + const [inputValue, setInputValue] = useState(''); + + return ( + { + const valueErrors = (errors.preset_values as unknown as Array) ?? []; + const errorIndices = new Set(); + const errorMessage = valueErrors?.map((error: FieldError, index: number) => { + errorIndices.add(index); + return ( + + Entry #{index + 1}: {error.message} +
+
+ ); + }); + + const onArrayChange = (_event: SyntheticEvent | null, newValue: string[] | null) => { + newValue = newValue ?? [...value, inputValue]; + newValue = newValue.map((v: string) => v.trim()).filter(Boolean); + onChange(newValue); + setInputValue(''); // Clear the input value after change + }; + + return ( + { + setInputValue(newInputValue); + }} + onChange={onArrayChange} + renderTags={(value, getTagProps) => ( + + {value.map((option, index) => ( + + ))} + + )} + renderInput={(params) => ( + + {inputValue && ( + { + onArrayChange(null, [...value, inputValue]); + }} + > + + + )} + onChange([])} + > + + + + ), + }} + /> + )} + /> + ); + }} + /> + ); +}; + +export default PresetValuesEditor; diff --git a/met-web/src/components/metadataManagement/types.ts b/met-web/src/components/metadataManagement/types.ts new file mode 100644 index 000000000..03d9df105 --- /dev/null +++ b/met-web/src/components/metadataManagement/types.ts @@ -0,0 +1,70 @@ +import { SvgIconComponent } from '@mui/icons-material'; +import { MetadataTaxon, MetadataTaxonModify } from 'models/engagement'; +import { ControllerRenderProps, FieldErrorsImpl, FieldValues } from 'react-hook-form'; +import * as yup from 'yup'; + +export type TaxonFormValues = { + [key: string]: string[]; +} & FieldValues; + +export interface IProps { + errorMessage?: string; + errorCode?: string; +} + +export interface ActionContextProps { + metadataTaxa: MetadataTaxon[]; + selectedTaxon: MetadataTaxon | null; + setSelectedTaxonId: (taxonId: number) => void; + reorderMetadataTaxa: (taxonIds: number[]) => void; + createMetadataTaxon: (taxon: MetadataTaxonModify) => Promise; + updateMetadataTaxon: (taxon: MetadataTaxon) => void; + removeMetadataTaxon: (taxonId: number) => void; + isLoading: boolean; +} + +export interface GenericInputProps { + field: ControllerRenderProps; + taxon: MetadataTaxon; + taxonType: TaxonType; + trigger: (name?: string | string[] | undefined) => Promise; + errors: Partial< + FieldErrorsImpl<{ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + [x: string]: any; // This is the type specified by react-hook-form for formState.errors + }> + >; + setValue: ( + name: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: any, + options?: + | Partial<{ + shouldValidate: boolean; + shouldDirty: boolean; + shouldTouch: boolean; + }> + | undefined, + ) => void; +} + +export interface TaxonType { + name: string; + icon: SvgIconComponent; + supportsPresetValues: boolean; + supportsFreeform: boolean; + supportsMulti: boolean; + yupValidator: yup.AnySchema; + customInput?: (props: GenericInputProps) => JSX.Element; + externalResource?: (value: string) => string; + externalResourceLabel?: string; +} + +export interface TaxonCardProps { + taxon: MetadataTaxon; + isExpanded: boolean; + onSelect: (taxon: MetadataTaxon) => void; + onExpand: (taxon: MetadataTaxon) => void; + isSelected: boolean; + index: number; +} diff --git a/met-web/src/components/survey/building/index.tsx b/met-web/src/components/survey/building/index.tsx index 9ab2bf4fc..1ef2cba8a 100644 --- a/met-web/src/components/survey/building/index.tsx +++ b/met-web/src/components/survey/building/index.tsx @@ -263,12 +263,7 @@ const SurveyFormBuilder = () => { ) : ( <> - setName(event.target.value)} - onBlur={(event) => setIsNamedFocused(false)} - /> + setName(event.target.value)} /> { setIsNamedFocused(!isNameFocused); diff --git a/met-web/src/models/engagement.ts b/met-web/src/models/engagement.ts index 70f6ee069..e78db3304 100644 --- 
a/met-web/src/models/engagement.ts +++ b/met-web/src/models/engagement.ts @@ -33,8 +33,26 @@ export interface Status { status_name: string; } +export interface MetadataTaxonModify { + name?: string; // The name of the taxon, optional + description?: string; // The description of the taxon, optional + freeform?: boolean; // Whether the taxon is freeform, optional + data_type?: string; // The data type for the taxon, optional + one_per_engagement?: boolean; // Whether the taxon is limited to one entry per engagement, optional + preset_values?: string[]; // The preset values for the taxon +} + +export interface MetadataTaxon extends MetadataTaxonModify { + id: number; // The id of the taxon + tenant_id: number; // The tenant id + position: number; // The taxon's position within the tenant + entries?: EngagementMetadata[]; // The content of the taxon +} + export interface EngagementMetadata { - engagement_id: number; + value: string; // The content of the metadata + taxon_id: number; // ID of the taxon this metadata is for + engagement_id?: number; // The ID of the relevant engagement } export interface EngagementSettings { @@ -76,12 +94,6 @@ export const createDefaultEngagement = (): Engagement => { }; }; -export const createDefaultEngagementMetadata = (): EngagementMetadata => { - return { - engagement_id: 0, - }; -}; - export const createDefaultEngagementSettings = (): EngagementSettings => { return { engagement_id: 0, diff --git a/met-web/src/routes/AuthenticatedRoutes.tsx b/met-web/src/routes/AuthenticatedRoutes.tsx index be1740c52..03605e52f 100644 --- a/met-web/src/routes/AuthenticatedRoutes.tsx +++ b/met-web/src/routes/AuthenticatedRoutes.tsx @@ -8,6 +8,7 @@ import SurveyListing from 'components/survey/listing'; import CreateSurvey from 'components/survey/create'; import SurveyFormBuilder from 'components/survey/building'; import SurveySubmit from 'components/survey/submit'; +import MetadataManagement from 'components/metadataManagement'; import CommentReview from 'components/comments/admin/review/CommentReview'; import CommentReviewListing from 'components/comments/admin/reviewListing'; import CommentTextListing from 'components/comments/admin/textListing'; @@ -59,6 +60,7 @@ const AuthenticatedRoutes = () => { } /> } /> } /> + } /> }> } /> diff --git a/met-web/src/services/engagementMetadataService/index.ts b/met-web/src/services/engagementMetadataService/index.ts index 1ac1ea3fd..2ee373ac9 100644 --- a/met-web/src/services/engagementMetadataService/index.ts +++ b/met-web/src/services/engagementMetadataService/index.ts @@ -1,18 +1,18 @@ import http from 'apiManager/httpRequestHandler'; -import { EngagementMetadata } from 'models/engagement'; +import { EngagementMetadata, MetadataTaxonModify, MetadataTaxon } from 'models/engagement'; import Endpoints from 'apiManager/endpoints'; import { replaceUrl } from 'helper'; -export const getEngagementMetadata = async (engagementId: number): Promise => { +export const getEngagementMetadata = async (engagementId: number): Promise => { const url = replaceUrl(Endpoints.EngagementMetadata.GET_BY_ENG, 'engagement_id', String(engagementId)); if (!engagementId || isNaN(Number(engagementId))) { - return Promise.reject('Invalid Engagement Id ' + engagementId); + throw new Error('Invalid Engagement ID ' + engagementId); } - const response = await http.GetRequest(url); + const response = await http.GetRequest(url); if (response.data) { return response.data; } - return Promise.reject('Failed to fetch engagement'); + throw new Error('Failed to fetch 
engagement'); }; export const postEngagementMetadata = async (data: EngagementMetadata): Promise => { @@ -20,7 +20,7 @@ export const postEngagementMetadata = async (data: EngagementMetadata): Promise< if (response.data) { return response.data; } - return Promise.reject('Failed to create engagement metadata'); + throw new Error('Failed to create engagement metadata'); }; export const patchEngagementMetadata = async (data: EngagementMetadata): Promise => { @@ -28,5 +28,75 @@ export const patchEngagementMetadata = async (data: EngagementMetadata): Promise if (response.data) { return response.data; } - return Promise.reject('Failed to update engagement metadata'); + throw new Error('Failed to update engagement metadata'); +}; + +export const bulkPatchEngagementMetadata = async ( + taxon_id: number, + engagement_id: number, + values: Array, +): Promise> => { + const url = replaceUrl(Endpoints.EngagementMetadata.BULK_UPDATE, 'engagement_id', String(engagement_id)); + const response = await http.PatchRequest>(url, { taxon_id, values }); + if (response.data) { + return response.data; + } + throw new Error('Failed to update engagement metadata'); +}; + +export const getMetadataTaxa = async (): Promise> => { + const response = await http.GetRequest>(Endpoints.MetadataTaxa.GET_BY_TENANT); + if (response.data) { + return response.data; + } + throw new Error('Failed to fetch metadata taxa'); +}; + +export const getMetadataTaxon = async (taxonId: number): Promise => { + const url = replaceUrl(Endpoints.MetadataTaxa.GET, 'taxon_id', String(taxonId)); + if (!taxonId || isNaN(Number(taxonId))) { + throw new Error('Invalid Taxon Id ' + taxonId); + } + const response = await http.GetRequest(url); + if (response.data) { + return response.data; + } + throw new Error('Failed to fetch metadata taxon'); +}; + +export const postMetadataTaxon = async (data: MetadataTaxonModify): Promise => { + const response = await http.PostRequest(Endpoints.MetadataTaxa.CREATE, data); + if (response.data) { + return response.data; + } + throw new Error('Failed to create metadata taxon'); +}; + +export const patchMetadataTaxon = async (id: number, data: MetadataTaxonModify): Promise => { + const url = replaceUrl(Endpoints.MetadataTaxa.UPDATE, 'taxon_id', String(id)); + const response = await http.PatchRequest(url, data); + if (response.data) { + return response.data; + } + throw new Error('Failed to update metadata taxon'); +}; + +export const deleteMetadataTaxon = async (taxonId: number): Promise => { + const url = replaceUrl(Endpoints.MetadataTaxa.DELETE, 'taxon_id', String(taxonId)); + const response = await http.DeleteRequest(url); + if (response.status === 204) { + return; + } + throw new Error('Failed to delete metadata taxon'); +}; + +export const patchMetadataTaxaOrder = async (taxonIds: Array): Promise> => { + const data = { + taxon_ids: taxonIds, + }; + const response = await http.PatchRequest>(Endpoints.MetadataTaxa.REORDER, data); + if (response.data) { + return response.data; + } + throw new Error('Failed to reorder metadata taxa'); }; diff --git a/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx b/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx index 0c25ef950..52fb5d1de 100644 --- a/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx +++ b/met-web/tests/unit/components/engagement/EngagementFormUserTab.test.tsx @@ -88,7 +88,9 @@ describe('Engagement form page tests', () => { const useParamsMock = jest.spyOn(reactRouter, 'useParams'); 
jest.spyOn(engagementService, 'getEngagement').mockReturnValue(Promise.resolve(draftEngagement)); jest.spyOn(widgetService, 'getWidgets').mockReturnValue(Promise.resolve([])); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue(Promise.resolve(engagementSetting)); beforeEach(() => { diff --git a/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx b/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx index 88dff7a8c..d98f5218b 100644 --- a/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx +++ b/met-web/tests/unit/components/engagement/form/create/EngagementForm.Create.test.tsx @@ -74,7 +74,7 @@ describe('Engagement form page tests', () => { const useParamsMock = jest.spyOn(reactRouter, 'useParams'); const getEngagementMetadataMock = jest .spyOn(engagementMetadataService, 'getEngagementMetadata') - .mockReturnValue(Promise.resolve(engagementMetadata)); + .mockReturnValue(Promise.resolve([engagementMetadata])); jest.spyOn(engagementMetadataService, 'patchEngagementMetadata').mockReturnValue( Promise.resolve(engagementMetadata), ); diff --git a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx index 8b8373156..a67edb906 100644 --- a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx +++ b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.One.test.tsx @@ -91,7 +91,7 @@ describe('Engagement form page tests', () => { const useParamsMock = jest.spyOn(reactRouter, 'useParams'); const getEngagementMetadataMock = jest .spyOn(engagementMetadataService, 'getEngagementMetadata') - .mockReturnValue(Promise.resolve(engagementMetadata)); + .mockReturnValue(Promise.resolve([engagementMetadata])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue(Promise.resolve(engagementSetting)); jest.spyOn(teamMemberService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementMetadataService, 'patchEngagementMetadata').mockReturnValue( @@ -184,17 +184,12 @@ describe('Engagement form page tests', () => { surveys: surveys, }), ); - getEngagementMetadataMock.mockReturnValueOnce( - Promise.resolve({ - ...engagementMetadata, - }), - ); render(); await waitFor(() => { expect(screen.getByDisplayValue('Test Engagement')).toBeInTheDocument(); }); - + getEngagementMetadataMock.mockReturnValueOnce(Promise.resolve([engagementMetadata])); expect(screen.getByText('Add Survey')).toBeDisabled(); }); diff --git a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.Two.test.tsx b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.Two.test.tsx index 9f5e2e065..d47693129 100644 --- a/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.Two.test.tsx +++ b/met-web/tests/unit/components/engagement/form/edit/EngagementForm.Edit.Two.test.tsx @@ -89,7 +89,9 @@ describe('Engagement form page tests', () => { jest.spyOn(engagementMetadataService, 'patchEngagementMetadata').mockReturnValue( Promise.resolve(engagementMetadata), ); - jest.spyOn(engagementMetadataService, 
'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(teamMemberService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); const getEngagementMock = jest .spyOn(engagementService, 'getEngagement') diff --git a/met-web/tests/unit/components/factory.ts b/met-web/tests/unit/components/factory.ts index c7a117b6b..33510f790 100644 --- a/met-web/tests/unit/components/factory.ts +++ b/met-web/tests/unit/components/factory.ts @@ -2,11 +2,11 @@ import '@testing-library/jest-dom'; import { createDefaultSurvey, Survey } from 'models/survey'; import { createDefaultEngagement, - createDefaultEngagementMetadata, createDefaultEngagementSettings, Engagement, EngagementMetadata, EngagementSettings, + MetadataTaxon, } from 'models/engagement'; import { EngagementStatus } from 'constants/engagementStatus'; import { WidgetType, Widget, WidgetItem } from 'models/widget'; @@ -252,8 +252,20 @@ const mockTimeLine: TimelineWidget = { events: [mockTimeLineEvent1], }; const engagementMetadata: EngagementMetadata = { - ...createDefaultEngagementMetadata(), engagement_id: 1, + taxon_id: 1, + value: 'test', +}; + +const engagementMetadataTaxon: MetadataTaxon = { + tenant_id: 1, + id: 1, + name: 'test', + data_type: 'text', + one_per_engagement: false, + freeform: true, + preset_values: ['test'], + position: 1, }; const engagementSetting: EngagementSettings = { @@ -278,6 +290,7 @@ export { eventWidgetItem, eventWidget, engagementMetadata, + engagementMetadataTaxon, engagementSlugData, engagementSetting, mockPoll, diff --git a/met-web/tests/unit/components/widgets/DocumentWidget.test.tsx b/met-web/tests/unit/components/widgets/DocumentWidget.test.tsx index a37cbea52..f6e314724 100644 --- a/met-web/tests/unit/components/widgets/DocumentWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/DocumentWidget.test.tsx @@ -113,7 +113,9 @@ describe('Document widget in engagement page tests', () => { jest.spyOn(notificationSlice, 'openNotification').mockImplementation(jest.fn()); jest.spyOn(engagementService, 'getEngagement').mockReturnValue(Promise.resolve(engagement)); jest.spyOn(documentService, 'fetchDocuments').mockReturnValue(Promise.resolve([mockFolder])); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(membershipService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue( Promise.resolve(mockEngagementSettings), diff --git a/met-web/tests/unit/components/widgets/EventsWidget.test.tsx b/met-web/tests/unit/components/widgets/EventsWidget.test.tsx index 18c894bda..4174c182d 100644 --- a/met-web/tests/unit/components/widgets/EventsWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/EventsWidget.test.tsx @@ -93,7 +93,9 @@ describe('Event Widget tests', () => { .spyOn(engagementService, 'getEngagement') .mockReturnValue(Promise.resolve(draftEngagement)); const getWidgetsMock = jest.spyOn(widgetService, 'getWidgets').mockReturnValue(Promise.resolve([eventWidget])); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( 
+ Promise.resolve([engagementMetadata]), + ); jest.spyOn(membershipService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue( Promise.resolve(mockEngagementSettings), diff --git a/met-web/tests/unit/components/widgets/MapWidget.test.tsx b/met-web/tests/unit/components/widgets/MapWidget.test.tsx index 915b81795..3cc5a2603 100644 --- a/met-web/tests/unit/components/widgets/MapWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/MapWidget.test.tsx @@ -86,7 +86,7 @@ jest.mock('react-router-dom', () => ({ useNavigate: () => jest.fn(), })); -jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); +jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve([engagementMetadata])); describe('Map Widget tests', () => { jest.spyOn(reactRedux, 'useDispatch').mockImplementation(() => jest.fn()); diff --git a/met-web/tests/unit/components/widgets/PhasesWidget.test.tsx b/met-web/tests/unit/components/widgets/PhasesWidget.test.tsx index 04e5e0bc6..a2c23f1a0 100644 --- a/met-web/tests/unit/components/widgets/PhasesWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/PhasesWidget.test.tsx @@ -97,7 +97,9 @@ describe('Phases widget tests', () => { .spyOn(engagementService, 'getEngagement') .mockReturnValue(Promise.resolve(draftEngagement)); const getWidgetsMock = jest.spyOn(widgetService, 'getWidgets').mockReturnValue(Promise.resolve([phasesWidget])); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(membershipService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue( Promise.resolve(mockEngagementSettings), diff --git a/met-web/tests/unit/components/widgets/VideoWidget.test.tsx b/met-web/tests/unit/components/widgets/VideoWidget.test.tsx index 128b7e977..5bdf9144a 100644 --- a/met-web/tests/unit/components/widgets/VideoWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/VideoWidget.test.tsx @@ -32,7 +32,7 @@ jest.mock('apiManager/apiSlices/widgets', () => ({ useDeleteWidgetMutation: () => [jest.fn(() => Promise.resolve())], useSortWidgetsMutation: () => [jest.fn(() => Promise.resolve())], })); -jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); +jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve([engagementMetadata])); // Mock the necessary services and contexts jest.mock('services/widgetService/VideoService', () => ({ diff --git a/met-web/tests/unit/components/widgets/WhoIsListeningWidget.test.tsx b/met-web/tests/unit/components/widgets/WhoIsListeningWidget.test.tsx index a56f4f647..8ab61ac0c 100644 --- a/met-web/tests/unit/components/widgets/WhoIsListeningWidget.test.tsx +++ b/met-web/tests/unit/components/widgets/WhoIsListeningWidget.test.tsx @@ -138,11 +138,13 @@ jest.mock('apiManager/apiSlices/widgets', () => ({ useDeleteWidgetMutation: () => [jest.fn(() => Promise.resolve())], useSortWidgetsMutation: () => [jest.fn(() => Promise.resolve())], })); -jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); +jest.spyOn(engagementMetadataService, 
'getEngagementMetadata').mockReturnValue(Promise.resolve([engagementMetadata])); describe('Who is Listening widget tests', () => { jest.spyOn(reactRedux, 'useDispatch').mockImplementation(() => jest.fn()); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(membershipService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue( Promise.resolve(mockEngagementSettings), diff --git a/met-web/tests/unit/components/widgets/setupWidgetTestEnv.tsx b/met-web/tests/unit/components/widgets/setupWidgetTestEnv.tsx index fc6b1d34d..c4de6063e 100644 --- a/met-web/tests/unit/components/widgets/setupWidgetTestEnv.tsx +++ b/met-web/tests/unit/components/widgets/setupWidgetTestEnv.tsx @@ -43,7 +43,9 @@ export const setupWidgetTestEnvMock = (): void => { export const setupWidgetTestEnvSpy = (): void => { setupEnv(); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(reactRedux, 'useSelector').mockImplementation(() => ({ roles: [USER_ROLES.VIEW_PRIVATE_ENGAGEMENTS, USER_ROLES.EDIT_ENGAGEMENT, USER_ROLES.CREATE_ENGAGEMENT], @@ -52,7 +54,9 @@ export const setupWidgetTestEnvSpy = (): void => { jest.spyOn(reactRouter, 'useParams').mockReturnValue({ projectId: '' }); jest.spyOn(reactRouter, 'useNavigate').mockReturnValue(jest.fn()); jest.spyOn(engagementService, 'getEngagement').mockReturnValue(Promise.resolve(draftEngagement)); - jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue(Promise.resolve(engagementMetadata)); + jest.spyOn(engagementMetadataService, 'getEngagementMetadata').mockReturnValue( + Promise.resolve([engagementMetadata]), + ); jest.spyOn(membershipService, 'getTeamMembers').mockReturnValue(Promise.resolve([])); jest.spyOn(engagementSettingService, 'getEngagementSettings').mockReturnValue( Promise.resolve(mockEngagementSettings), From 437b1336a896067599186708c4062d1eb46039ec Mon Sep 17 00:00:00 2001 From: Ratheesh kumar R <108045773+ratheesh-aot@users.noreply.github.com> Date: Fri, 15 Mar 2024 12:04:04 -0700 Subject: [PATCH 20/42] [To Main] DESENG-515: Multi-language - Create event, subcribe_item, poll, timeline widget translation tables & API routes (#2421) * DESENG-515: Models and Test * DESENG-515: Correcting migration file * DESENG-515: Updating function name * DESENG-515 : Added API, Service and Tests for Poll answer translation * DESENG-515: Poll answer translation api and test * DESENG-515 : Event item translation api and test * DESENG-515: Fixing comments and tests * DESENG-515: Subscribe item and Timeline event translation API * DESENG-515: Fixing lint findings * DESENG-515: Fixing review comments * DESENG-515: Fixed migration version based on main * DESENG-515: Updated changelog * DESENG-515: Fixing code cov comments * DESENG-515: Isort fix --- CHANGELOG.MD | 11 +- ...c_event_subscribe_poll_timeline_widget_.py | 93 ++++++ met-api/src/met_api/models/__init__.py | 4 + .../met_api/models/event_item_translation.py | 123 ++++++++ .../met_api/models/poll_answer_translation.py | 104 +++++++ .../models/subscribe_item_translation.py | 114 +++++++ 
.../models/timeline_event_translation.py | 106 +++++++ met-api/src/met_api/resources/__init__.py | 9 + .../resources/event_item_translation.py | 152 +++++++++ .../resources/poll_answer_translation.py | 152 +++++++++ .../resources/subscribe_item_translation.py | 152 +++++++++ .../resources/timeline_event_translation.py | 153 ++++++++++ .../schemas/event_item_translation_schema.py | 25 ++ .../schemas/poll_answer_translation_schema.py | 21 ++ .../schemas/event_item_translation.json | 75 +++++ .../schemas/poll_answer_translation.json | 44 +++ .../schemas/subscribe_item_translation.json | 58 ++++ .../schemas/timeline_event_translation.json | 50 +++ .../subscribe_item_translation_schema.py | 23 ++ .../timeline_event_translation_schema.py | 22 ++ .../event_item_translation_service.py | 136 +++++++++ .../poll_answer_translation_service.py | 123 ++++++++ .../met_api/services/poll_answers_service.py | 9 + .../subscribe_item_translation_service.py | 105 +++++++ .../services/timeline_event_service.py | 6 +- .../timeline_event_translation_service.py | 132 ++++++++ .../met_api/services/widget_events_service.py | 10 + .../services/widget_subscribe_service.py | 10 + .../services/widget_timeline_service.py | 6 + .../unit/api/test_event_item_translation.py | 170 +++++++++++ .../unit/api/test_poll_answer_translation.py | 141 +++++++++ .../api/test_subscribe_item_translation.py | 132 ++++++++ .../api/test_timeline_event_translation.py | 166 ++++++++++ .../models/test_event_item_translation.py | 89 ++++++ .../models/test_poll_answer_translation.py | 79 +++++ .../models/test_subscribe_item_translation.py | 77 +++++ .../models/test_timeline_event_translation.py | 96 ++++++ .../test_event_item_translation_service.py | 144 +++++++++ .../test_poll_answer_translation_service.py | 147 +++++++++ .../services/test_poll_answers_service.py | 15 + ...test_subscribe_item_translation_service.py | 144 +++++++++ ...test_timeline_event_translation_service.py | 144 +++++++++ met-api/tests/utilities/factory_scenarios.py | 45 +++ met-api/tests/utilities/factory_utils.py | 288 +++++++++++++++--- 44 files changed, 3855 insertions(+), 50 deletions(-) create mode 100644 met-api/migrations/versions/f3842579261c_event_subscribe_poll_timeline_widget_.py create mode 100644 met-api/src/met_api/models/event_item_translation.py create mode 100644 met-api/src/met_api/models/poll_answer_translation.py create mode 100644 met-api/src/met_api/models/subscribe_item_translation.py create mode 100644 met-api/src/met_api/models/timeline_event_translation.py create mode 100644 met-api/src/met_api/resources/event_item_translation.py create mode 100644 met-api/src/met_api/resources/poll_answer_translation.py create mode 100644 met-api/src/met_api/resources/subscribe_item_translation.py create mode 100644 met-api/src/met_api/resources/timeline_event_translation.py create mode 100644 met-api/src/met_api/schemas/event_item_translation_schema.py create mode 100644 met-api/src/met_api/schemas/poll_answer_translation_schema.py create mode 100644 met-api/src/met_api/schemas/schemas/event_item_translation.json create mode 100644 met-api/src/met_api/schemas/schemas/poll_answer_translation.json create mode 100644 met-api/src/met_api/schemas/schemas/subscribe_item_translation.json create mode 100644 met-api/src/met_api/schemas/schemas/timeline_event_translation.json create mode 100644 met-api/src/met_api/schemas/subscribe_item_translation_schema.py create mode 100644 met-api/src/met_api/schemas/timeline_event_translation_schema.py create mode 100644 
met-api/src/met_api/services/event_item_translation_service.py create mode 100644 met-api/src/met_api/services/poll_answer_translation_service.py create mode 100644 met-api/src/met_api/services/subscribe_item_translation_service.py create mode 100644 met-api/src/met_api/services/timeline_event_translation_service.py create mode 100644 met-api/tests/unit/api/test_event_item_translation.py create mode 100644 met-api/tests/unit/api/test_poll_answer_translation.py create mode 100644 met-api/tests/unit/api/test_subscribe_item_translation.py create mode 100644 met-api/tests/unit/api/test_timeline_event_translation.py create mode 100644 met-api/tests/unit/models/test_event_item_translation.py create mode 100644 met-api/tests/unit/models/test_poll_answer_translation.py create mode 100644 met-api/tests/unit/models/test_subscribe_item_translation.py create mode 100644 met-api/tests/unit/models/test_timeline_event_translation.py create mode 100644 met-api/tests/unit/services/test_event_item_translation_service.py create mode 100644 met-api/tests/unit/services/test_poll_answer_translation_service.py create mode 100644 met-api/tests/unit/services/test_subscribe_item_translation_service.py create mode 100644 met-api/tests/unit/services/test_timeline_event_translation_service.py diff --git a/CHANGELOG.MD b/CHANGELOG.MD index f4d2a7843..6006949be 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,6 +1,15 @@ +## March 15, 2024 + +- **Task**: Multi-language - Create event, subcribe_item, poll, timeline widget translation tables & API routes [DESENG-515](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-515) + - Added Poll answer translation API. + - Added Timeline Event translation API. + - Added Subscribe Item translation API. + - Added Event item translation API + - Added Unit tests. + ## March 08, 2024 -- **Task**Multi-language - Create engagement translation table & API routes [DESENG-510](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-510) +- **Task**: Multi-language - Create engagement translation table & API routes [DESENG-510](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-510) - Added Engagement translation model. - Added Engagement translation API. - Added Unit tests. diff --git a/met-api/migrations/versions/f3842579261c_event_subscribe_poll_timeline_widget_.py b/met-api/migrations/versions/f3842579261c_event_subscribe_poll_timeline_widget_.py new file mode 100644 index 000000000..260301b80 --- /dev/null +++ b/met-api/migrations/versions/f3842579261c_event_subscribe_poll_timeline_widget_.py @@ -0,0 +1,93 @@ +"""event_subscribe_poll_timeline_widget_translation + +Revision ID: f3842579261c +Revises: 274a2774607b +Create Date: 2024-03-08 10:59:03.021386 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'f3842579261c' +down_revision = 'c4f7189494ed' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('event_item_translation', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('language_id', sa.Integer(), nullable=False), + sa.Column('event_item_id', sa.Integer(), nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('location_name', sa.String(length=50), nullable=True), + sa.Column('location_address', sa.String(length=100), nullable=True, comment='The address of the location'), + sa.Column('url', sa.String(length=500), nullable=True), + sa.Column('url_label', sa.String(length=100), nullable=True, comment='Label to show for href links'), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['event_item_id'], ['event_item.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['language_id'], ['language.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('event_item_id', 'language_id', name='_event_item_language_uc') + ) + op.create_table('poll_answer_translation', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('poll_answer_id', sa.Integer(), nullable=False), + sa.Column('language_id', sa.Integer(), nullable=False), + sa.Column('answer_text', sa.String(length=255), nullable=False), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['language_id'], ['language.id'], ), + sa.ForeignKeyConstraint(['poll_answer_id'], ['poll_answers.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('poll_answer_id', 'language_id', name='_poll_answer_language_uc') + ) + op.create_table('subscribe_item_translation', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('language_id', sa.Integer(), nullable=False), + sa.Column('subscribe_item_id', sa.Integer(), nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('rich_description', sa.Text(), nullable=True), + sa.Column('call_to_action_text', sa.String(length=25), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['language_id'], ['language.id'], ), + sa.ForeignKeyConstraint(['subscribe_item_id'], ['subscribe_item.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('subscribe_item_id', 'language_id', name='_subscribe_item_language_uc') + ) + op.create_table('timeline_event_translation', + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('updated_date', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('language_id', sa.Integer(), nullable=False), + sa.Column('timeline_event_id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('time', sa.String(length=255), nullable=True), + sa.Column('created_by', sa.String(length=50), nullable=True), + sa.Column('updated_by', sa.String(length=50), nullable=True), + 
sa.ForeignKeyConstraint(['language_id'], ['language.id'], ), + sa.ForeignKeyConstraint(['timeline_event_id'], ['timeline_event.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('timeline_event_id', 'language_id', name='_timeline_event_language_uc') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('timeline_event_translation') + op.drop_table('subscribe_item_translation') + op.drop_table('poll_answer_translation') + op.drop_table('event_item_translation') + # ### end Alembic commands ### diff --git a/met-api/src/met_api/models/__init__.py b/met-api/src/met_api/models/__init__.py index ce0468da6..405affb61 100644 --- a/met-api/src/met_api/models/__init__.py +++ b/met-api/src/met_api/models/__init__.py @@ -61,4 +61,8 @@ from .language import Language from .widget_translation import WidgetTranslation from .survey_translation import SurveyTranslation +from .event_item_translation import EventItemTranslation +from .poll_answer_translation import PollAnswerTranslation +from .timeline_event_translation import TimelineEventTranslation +from .subscribe_item_translation import SubscribeItemTranslation from .engagement_translation import EngagementTranslation diff --git a/met-api/src/met_api/models/event_item_translation.py b/met-api/src/met_api/models/event_item_translation.py new file mode 100644 index 000000000..162a790d1 --- /dev/null +++ b/met-api/src/met_api/models/event_item_translation.py @@ -0,0 +1,123 @@ +"""Event item translation model class. + +Manages the translations for Event Items. +""" + +from __future__ import annotations + +from sqlalchemy import UniqueConstraint +from sqlalchemy.sql.schema import ForeignKey + +from .base_model import BaseModel +from .db import db + + +class EventItemTranslation(BaseModel): + """Event Items Translation table.""" + + __tablename__ = 'event_item_translation' + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + language_id = db.Column( + db.Integer, ForeignKey('language.id'), nullable=False + ) + event_item_id = db.Column( + db.Integer, + ForeignKey('event_item.id', ondelete='CASCADE'), + nullable=False, + ) + description = db.Column(db.String(500)) + location_name = db.Column(db.String(50), nullable=True) + location_address = db.Column( + db.String(100), comment='The address of the location', nullable=True + ) + url = db.Column(db.String(500)) + url_label = db.Column( + db.String(100), comment='Label to show for href links' + ) + + # An Event item has only one version in a particular language + __table_args__ = ( + UniqueConstraint( + 'event_item_id', + 'language_id', + name='_event_item_language_uc', + ), + ) + + @staticmethod + def get_by_item_and_language(event_item_id=None, language_id=None): + """ + Get event item translation by item ID and language ID. + + :param event_item_id (int): ID of the event item + :param language_id (int): ID of the language + :return: list: List of EventItemTranslation objects + """ + query = EventItemTranslation.query + if event_item_id is not None: + query = query.filter_by(event_item_id=event_item_id) + if language_id is not None: + query = query.filter_by(language_id=language_id) + + event_item_translation_records = query.all() + return event_item_translation_records + + @classmethod + def create_event_item_translation(cls, data): + """ + Insert a new EventItemTranslation record. 
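A usage sketch, not from the patch itself: the get_by_item_and_language helper defined above treats both of its filters as optional, so callers can fetch every translation of an event item or narrow the result to one language. It assumes a Flask application context with the met_api models importable, plus an event item and a language already saved under the purely illustrative ids 1 and 2.

    from met_api.models.event_item_translation import EventItemTranslation

    # Passing only event_item_id returns one row per language the item has been translated into.
    all_translations = EventItemTranslation.get_by_item_and_language(event_item_id=1)

    # Adding language_id narrows the list to at most one row, because the table declares
    # the _event_item_language_uc unique constraint on (event_item_id, language_id).
    single_language = EventItemTranslation.get_by_item_and_language(event_item_id=1, language_id=2)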
+ + :param data: Dictionary containing the fields for EventItemTranslation + :return: EventItemTranslation instance + """ + event_item_translation = EventItemTranslation( + event_item_id=data['event_item_id'], + language_id=data['language_id'], + description=data.get( + 'description' + ), + location_name=data.get( + 'location_name' + ), + location_address=data.get( + 'location_address' + ), + url=data.get( + 'url' + ), + url_label=data.get( + 'url_label' + ) + ) + event_item_translation.save() + return event_item_translation + + @classmethod + def update_event_item_translation(cls, translation_id, data): + """ + Update an existing EventItemTranslation record. + + :param translation_id: ID of the EventItemTranslation to update + :param data: Dictionary of fields to update + :return: Updated EventItemTranslation instance + """ + event_item_translation = cls.find_by_id(translation_id) + if event_item_translation: + for key, value in data.items(): + setattr(event_item_translation, key, value) + event_item_translation.save() + return event_item_translation + + @classmethod + def delete_event_item_translation(cls, translation_id): + """ + Delete an EventItemTranslation record. + + :param translation_id: ID of the EventItemTranslation to delete + :return: None + """ + event_item_translation = cls.find_by_id(translation_id) + if event_item_translation: + event_item_translation.delete() + return True + return False diff --git a/met-api/src/met_api/models/poll_answer_translation.py b/met-api/src/met_api/models/poll_answer_translation.py new file mode 100644 index 000000000..b892e66fa --- /dev/null +++ b/met-api/src/met_api/models/poll_answer_translation.py @@ -0,0 +1,104 @@ +""" +PollAnswers translation model class. + +Manages the translation of Poll answers +""" + +from __future__ import annotations + +from sqlalchemy.sql.schema import ForeignKey +from sqlalchemy import UniqueConstraint +from .base_model import BaseModel +from .db import db + + +class PollAnswerTranslation(BaseModel): + """Definition of the PollAnswerTranslation entity.""" + + __tablename__ = 'poll_answer_translation' + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + poll_answer_id = db.Column( + db.Integer, + ForeignKey('poll_answers.id', ondelete='CASCADE'), + nullable=False, + ) + language_id = db.Column( + db.Integer, ForeignKey('language.id'), nullable=False + ) + answer_text = db.Column(db.String(255), nullable=False) + + # A poll answer has only one version in a particular language + __table_args__ = ( + UniqueConstraint( + 'poll_answer_id', 'language_id', name='_poll_answer_language_uc' + ), + ) + + @staticmethod + def get_by_answer_and_language( + poll_answer_id=None, language_id=None + ): + """ + Get poll answer translation by answer ID and language ID. + + :param poll_answer_id (int): ID of the poll answer + :param language_id (int): ID of the language + :return: list: List of PollAnswerTranslation objects + """ + query = PollAnswerTranslation.query + if poll_answer_id is not None: + query = query.filter_by(poll_answer_id=poll_answer_id) + if language_id is not None: + query = query.filter_by(language_id=language_id) + + poll_answer_translation_records = query.all() + return poll_answer_translation_records + + @classmethod + def create_poll_answer_translation(cls, data): + """ + Insert a new PollAnswerTranslation record. 
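All four translation models in this patch expose the same create, update and delete classmethods, so the calling pattern is interchangeable across widget types. A minimal lifecycle sketch with PollAnswerTranslation, assuming an application context and an existing poll answer (id 1) and language (id 2); the ids and strings are illustrative only.

    from met_api.models.poll_answer_translation import PollAnswerTranslation

    # Create a translation row; the classmethod saves it and returns the model instance.
    translation = PollAnswerTranslation.create_poll_answer_translation({
        'poll_answer_id': 1,   # hypothetical existing poll answer
        'language_id': 2,      # hypothetical language row
        'answer_text': 'Oui',
    })

    # Update by primary key; an unknown id returns None instead of raising.
    PollAnswerTranslation.update_poll_answer_translation(translation.id, {'answer_text': 'Non'})

    # Delete by primary key; returns True when a row was removed, False otherwise.
    PollAnswerTranslation.delete_poll_answer_translation(translation.id)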
+ + :param data: Dictionary containing the fields for PollAnswerTranslation + :return: PollAnswerTranslation instance + """ + poll_answer_translation = PollAnswerTranslation( + poll_answer_id=data['poll_answer_id'], + language_id=data['language_id'], + answer_text=data.get( + 'answer_text' + ), # Returns `None` if 'answer_text' is not in `data` as its optional + ) + + poll_answer_translation.save() + return poll_answer_translation + + @classmethod + def update_poll_answer_translation(cls, translation_id, data): + """ + Update an existing PollAnswerTranslation record. + + :param translation_id: ID of the PollAnswerTranslation to update + :param data: Dictionary of fields to update + :return: Updated PollAnswerTranslation instance + """ + poll_answer_translation = cls.find_by_id(translation_id) + if poll_answer_translation: + for key, value in data.items(): + setattr(poll_answer_translation, key, value) + poll_answer_translation.save() + return poll_answer_translation + + @classmethod + def delete_poll_answer_translation(cls, translation_id): + """ + Delete a PollAnswerTranslation record. + + :param translation_id: ID of the PollAnswerTranslation to delete + :return: None + """ + poll_answer_translation = cls.find_by_id(translation_id) + if poll_answer_translation: + poll_answer_translation.delete() + return True + return False diff --git a/met-api/src/met_api/models/subscribe_item_translation.py b/met-api/src/met_api/models/subscribe_item_translation.py new file mode 100644 index 000000000..0b6b28ca4 --- /dev/null +++ b/met-api/src/met_api/models/subscribe_item_translation.py @@ -0,0 +1,114 @@ +"""Subscribe Item Translatiion model class. + +Manages the translation for Subscribe items +""" + +from __future__ import annotations +from sqlalchemy.sql.schema import ForeignKey +from sqlalchemy import UniqueConstraint +from .base_model import BaseModel +from .db import db + + +class SubscribeItemTranslation(BaseModel): + """Subscribe Item Translation table.""" + + __tablename__ = 'subscribe_item_translation' + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + language_id = db.Column( + db.Integer, ForeignKey('language.id'), nullable=False + ) + subscribe_item_id = db.Column( + db.Integer, + ForeignKey('subscribe_item.id', ondelete='CASCADE'), + nullable=False, + ) + description = db.Column(db.String(500)) + rich_description = db.Column(db.Text) + call_to_action_text = db.Column(db.String(25)) + + # A Subscribe item has only one version in a particular language + __table_args__ = ( + UniqueConstraint( + 'subscribe_item_id', + 'language_id', + name='_subscribe_item_language_uc', + ), + ) + + @staticmethod + def get_by_item_and_language( + subscribe_item_id=None, language_id=None + ): + """ + Get subscribe item translation by item ID and language ID. + + :param data: + subscribe_item_id (int): ID of the subscribe item + language_id (int): ID of the language + + :return: + list: List of SubscribeItemTranslation objects + """ + query = SubscribeItemTranslation.query + if subscribe_item_id is not None: + query = query.filter_by(subscribe_item_id=subscribe_item_id) + if language_id is not None: + query = query.filter_by(language_id=language_id) + + subscribe_item_translation_records = query.all() + return subscribe_item_translation_records + + @classmethod + def create_sub_item_translation(cls, data): + """ + Insert a new SubscribeItemTranslation record. 
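Because each translation table above pairs its parent item with a language in a unique constraint, a second translation for the same pair is rejected by the database rather than silently replacing the first. A sketch of that failure mode with SubscribeItemTranslation; the ids are illustrative, and exactly where the error surfaces (inside save() or at commit time) depends on the base model's session handling, which is not shown in this patch.

    from sqlalchemy.exc import IntegrityError
    from met_api.models.subscribe_item_translation import SubscribeItemTranslation

    payload = {'subscribe_item_id': 1, 'language_id': 2, 'description': 'Abonnez-vous'}
    SubscribeItemTranslation.create_sub_item_translation(payload)

    try:
        # A duplicate (subscribe_item_id, language_id) pair violates _subscribe_item_language_uc.
        SubscribeItemTranslation.create_sub_item_translation(payload)
    except IntegrityError:
        # Expected: the database refuses the duplicate translation.
        pass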
diff --git a/met-api/src/met_api/models/subscribe_item_translation.py b/met-api/src/met_api/models/subscribe_item_translation.py
new file mode 100644
index 000000000..0b6b28ca4
--- /dev/null
+++ b/met-api/src/met_api/models/subscribe_item_translation.py
@@ -0,0 +1,114 @@
+"""Subscribe Item Translation model class.
+
+Manages the translation for Subscribe items
+"""
+
+from __future__ import annotations
+from sqlalchemy.sql.schema import ForeignKey
+from sqlalchemy import UniqueConstraint
+from .base_model import BaseModel
+from .db import db
+
+
+class SubscribeItemTranslation(BaseModel):
+    """Subscribe Item Translation table."""
+
+    __tablename__ = 'subscribe_item_translation'
+    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+    language_id = db.Column(
+        db.Integer, ForeignKey('language.id'), nullable=False
+    )
+    subscribe_item_id = db.Column(
+        db.Integer,
+        ForeignKey('subscribe_item.id', ondelete='CASCADE'),
+        nullable=False,
+    )
+    description = db.Column(db.String(500))
+    rich_description = db.Column(db.Text)
+    call_to_action_text = db.Column(db.String(25))
+
+    # A Subscribe item has only one version in a particular language
+    __table_args__ = (
+        UniqueConstraint(
+            'subscribe_item_id',
+            'language_id',
+            name='_subscribe_item_language_uc',
+        ),
+    )
+
+    @staticmethod
+    def get_by_item_and_language(
+        subscribe_item_id=None, language_id=None
+    ):
+        """
+        Get subscribe item translation by item ID and language ID.
+
+        :param subscribe_item_id (int): ID of the subscribe item
+        :param language_id (int): ID of the language
+
+        :return:
+            list: List of SubscribeItemTranslation objects
+        """
+        query = SubscribeItemTranslation.query
+        if subscribe_item_id is not None:
+            query = query.filter_by(subscribe_item_id=subscribe_item_id)
+        if language_id is not None:
+            query = query.filter_by(language_id=language_id)
+
+        subscribe_item_translation_records = query.all()
+        return subscribe_item_translation_records
+
+    @classmethod
+    def create_sub_item_translation(cls, data):
+        """
+        Insert a new SubscribeItemTranslation record.
+
+        :param data: Dictionary containing the fields for SubscribeItemTranslation
+        :return: SubscribeItemTranslation instance
+        """
+        subscribe_item_translation = SubscribeItemTranslation(
+            subscribe_item_id=data['subscribe_item_id'],
+            language_id=data['language_id'],
+            description=data.get(
+                'description'
+            ),
+            rich_description=data.get(
+                'rich_description'
+            ),
+            call_to_action_text=data.get(
+                'call_to_action_text'
+            )
+        )
+        subscribe_item_translation.save()
+        return subscribe_item_translation
+
+    @classmethod
+    def update_sub_item_translation(cls, translation_id, data):
+        """
+        Update an existing SubscribeItemTranslation record.
+
+        :param translation_id: ID of the SubscribeItemTranslation to update
+        :param data: Dictionary of fields to update
+        :return: Updated SubscribeItemTranslation instance
+        """
+        subscribe_item_translation = cls.find_by_id(translation_id)
+        if subscribe_item_translation:
+            for key, value in data.items():
+                setattr(subscribe_item_translation, key, value)
+            subscribe_item_translation.save()
+        return subscribe_item_translation
+
+    @classmethod
+    def delete_sub_item_translation(cls, translation_id):
+        """
+        Delete a SubscribeItemTranslation record.
+
+        :param translation_id: ID of the SubscribeItemTranslation to delete
+        :return: True if the record was deleted, False otherwise
+        """
+        subscribe_item_translation = cls.find_by_id(translation_id)
+        if subscribe_item_translation:
+            subscribe_item_translation.delete()
+            return True
+        return False
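The unique constraint above means the database rejects a second translation for the same subscribe item and language. A rough sketch of how that surfaces to callers, assuming BaseModel.save() commits immediately so the duplicate insert raises IntegrityError (a subclass of SQLAlchemyError, which the service layer added later in this change converts into a BusinessException); the IDs are placeholders:

from sqlalchemy.exc import IntegrityError

from met_api.models.subscribe_item_translation import SubscribeItemTranslation


def sketch_duplicate_translation_guard():
    """Only one translation row may exist per subscribe item and language."""
    payload = {
        'subscribe_item_id': 1,   # placeholder subscribe item id
        'language_id': 2,         # placeholder language id
        'description': 'Descripción en español',
    }
    SubscribeItemTranslation.create_sub_item_translation(payload)
    try:
        # Second insert with the same (subscribe_item_id, language_id) pair.
        SubscribeItemTranslation.create_sub_item_translation(payload)
    except IntegrityError:
        # _subscribe_item_language_uc rejects the duplicate row.
        pass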
diff --git a/met-api/src/met_api/models/timeline_event_translation.py b/met-api/src/met_api/models/timeline_event_translation.py
new file mode 100644
index 000000000..cf86ef840
--- /dev/null
+++ b/met-api/src/met_api/models/timeline_event_translation.py
@@ -0,0 +1,106 @@
+"""Timeline Event Translation model class.
+
+Manages the translation for timeline events
+"""
+
+from __future__ import annotations
+
+from sqlalchemy.sql.schema import ForeignKey
+from sqlalchemy import UniqueConstraint
+from .base_model import BaseModel
+from .db import db
+
+
+class TimelineEventTranslation(BaseModel):
+    """Definition of the TimelineEventTranslation entity."""
+
+    __tablename__ = 'timeline_event_translation'
+    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+    language_id = db.Column(
+        db.Integer, ForeignKey('language.id'), nullable=False
+    )
+    timeline_event_id = db.Column(
+        db.Integer,
+        ForeignKey('timeline_event.id', ondelete='CASCADE'),
+        nullable=False,
+    )
+    description = db.Column(db.Text(), nullable=True)
+    time = db.Column(db.String(255), nullable=True)
+
+    # A TimelineEvent item has only one version in a particular language
+    __table_args__ = (
+        UniqueConstraint(
+            'timeline_event_id',
+            'language_id',
+            name='_timeline_event_language_uc',
+        ),
+    )
+
+    @staticmethod
+    def get_by_event_and_language(timeline_event_id=None, language_id=None):
+        """
+        Get timeline event translation by event ID and language ID.
+
+        :param timeline_event_id (int): ID of the timeline event
+        :param language_id (int): ID of the language
+        :return: list: List of TimelineEventTranslation objects
+        """
+        query = TimelineEventTranslation.query
+        if timeline_event_id is not None:
+            query = query.filter_by(timeline_event_id=timeline_event_id)
+        if language_id is not None:
+            query = query.filter_by(language_id=language_id)
+
+        timeline_event_translation_records = query.all()
+        return timeline_event_translation_records
+
+    @classmethod
+    def create_timeline_event_translation(cls, data):
+        """
+        Insert a new TimelineEventTranslation record.
+
+        :param data: Dictionary containing the fields for TimelineEventTranslation
+        :return: TimelineEventTranslation instance
+        """
+        timeline_event_translation = TimelineEventTranslation(
+            timeline_event_id=data['timeline_event_id'],
+            language_id=data['language_id'],
+            description=data.get(
+                'description'
+            ),
+            time=data.get(
+                'time'
+            )
+        )
+        timeline_event_translation.save()
+        return timeline_event_translation
+
+    @classmethod
+    def update_timeline_event_translation(cls, translation_id, data):
+        """
+        Update an existing TimelineEventTranslation record.
+
+        :param translation_id: ID of the TimelineEventTranslation to update
+        :param data: Dictionary of fields to update
+        :return: Updated TimelineEventTranslation instance
+        """
+        timeline_event_translation = cls.find_by_id(translation_id)
+        if timeline_event_translation:
+            for key, value in data.items():
+                setattr(timeline_event_translation, key, value)
+            timeline_event_translation.save()
+        return timeline_event_translation
+
+    @classmethod
+    def delete_timeline_event_translation(cls, translation_id):
+        """
+        Delete a TimelineEventTranslation record.
+
+        :param translation_id: ID of the TimelineEventTranslation to delete
+        :return: True if the record was deleted, False otherwise
+        """
+        timeline_event_translation = cls.find_by_id(translation_id)
+        if timeline_event_translation:
+            timeline_event_translation.delete()
+            return True
+        return False
diff --git a/met-api/src/met_api/resources/__init__.py b/met-api/src/met_api/resources/__init__.py
index f50fe914b..0bcaad1bc 100644
--- a/met-api/src/met_api/resources/__init__.py
+++ b/met-api/src/met_api/resources/__init__.py
@@ -58,6 +58,10 @@
 from .language import API as LANGUAGE_API
 from .widget_translation import API as WIDGET_TRANSLATION_API
 from .survey_translation import API as SURVEY_TRANSLATION_API
+from .poll_answer_translation import API as POLL_ANSWER_TRANSLATION_API
+from .event_item_translation import API as EVENT_ITEM_TRANSLATION_API
+from .subscribe_item_translation import API as SUBSCRIBE_ITEM_TRANSLATION_API
+from .timeline_event_translation import API as TIMELINE_EVENT_TRANSLATION_API
 from .engagement_translation import API as ENGAGEMENT_TRANSLATION_API
 
 __all__ = ('API_BLUEPRINT',)
@@ -108,4 +112,8 @@
 API.add_namespace(LANGUAGE_API, path='/languages')
 API.add_namespace(WIDGET_TRANSLATION_API, path='/widget//translations')
 API.add_namespace(SURVEY_TRANSLATION_API, path='/surveys//translations')
+API.add_namespace(POLL_ANSWER_TRANSLATION_API, path='/polls//translations')
+API.add_namespace(EVENT_ITEM_TRANSLATION_API, path='/events//translations')
+API.add_namespace(SUBSCRIBE_ITEM_TRANSLATION_API, path='/subscribe//translations')
+API.add_namespace(TIMELINE_EVENT_TRANSLATION_API, path='/timelines//translations')
 API.add_namespace(ENGAGEMENT_TRANSLATION_API, path='/engagement//translations')
diff --git a/met-api/src/met_api/resources/event_item_translation.py b/met-api/src/met_api/resources/event_item_translation.py
new file mode 100644
index 000000000..66048cf76
--- /dev/null
+++ b/met-api/src/met_api/resources/event_item_translation.py
@@ -0,0 +1,152 @@
+# Copyright © 2024 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+"""API endpoints for managing an EventItemTranslation resource."""
+
+from http import HTTPStatus
+
+from flask import request
+from flask_cors import cross_origin
+from flask_restx import Namespace, Resource +from marshmallow import ValidationError + +from met_api.auth import jwt as _jwt +from met_api.exceptions.business_exception import BusinessException +from met_api.schemas import utils as schema_utils +from met_api.schemas.event_item_translation_schema import EventItemTranslationSchema +from met_api.services.event_item_translation_service import EventItemTranslationService +from met_api.utils.util import allowedorigins, cors_preflight + + +API = Namespace( + 'event_item_translations', + description='Endpoints for EventItemTranslation Management', +) + + +@cors_preflight('GET, POST, PATCH, DELETE, OPTIONS') +@API.route('/') +class EventItemTranslationResource(Resource): + """Resource for managing event item translations.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(event_item_translation_id, **_): + """Fetch an event item translation by id.""" + try: + event_item_translation = ( + EventItemTranslationService.get_by_id( + event_item_translation_id + ) + ) + return ( + EventItemTranslationSchema().dump(event_item_translation), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def patch(event_id, event_item_translation_id): + """Update saved event item translation partially.""" + try: + request_json = request.get_json() + event_item_translation = ( + EventItemTranslationService.update_event_item_translation( + event_id, event_item_translation_id, request_json + ) + ) + return ( + EventItemTranslationSchema().dump(event_item_translation), + HTTPStatus.OK, + ) + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def delete(event_id, event_item_translation_id): + """Delete an event item translation.""" + try: + success = EventItemTranslationService.delete_event_item_translation( + event_id, event_item_translation_id + ) + if success: + return ( + 'Successfully deleted event item translation', + HTTPStatus.NO_CONTENT, + ) + raise ValueError('Event item translation not found') + except KeyError as err: + return str(err), HTTPStatus.BAD_REQUEST + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + + +@cors_preflight('GET, OPTIONS') +@API.route('/item//language/') +class EventItemTranslationResourceByLanguage(Resource): + """Resource for event item using language_id.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(event_item_id, language_id, **_): + """Fetch an event item translation by language_id.""" + try: + event_item_translation = ( + EventItemTranslationService.get_event_item_translation( + event_item_id, language_id + ) + ) + return ( + EventItemTranslationSchema().dump( + event_item_translation[0], many=False + ), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + +@cors_preflight('POST, OPTIONS') +@API.route('/') +class EventItemTranslations(Resource): + """Resource for managing multiple event item translations.""" + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def post(event_id): + """Create a new event item translation.""" + try: + request_json = request.get_json() + valid_format, errors = schema_utils.validate( + request_json, 'event_item_translation' 
+ ) + if not valid_format: + return { + 'message': schema_utils.serialize(errors) + }, HTTPStatus.BAD_REQUEST + # Option to pre-populate with default values, default True + pre_populate = request_json.get('pre_populate', True) + + event_item_translation = ( + EventItemTranslationService.create_event_item_translation( + event_id, request_json, pre_populate + ) + ) + return ( + EventItemTranslationSchema().dump(event_item_translation), + HTTPStatus.CREATED, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + except BusinessException as err: + return err.error, err.status_code diff --git a/met-api/src/met_api/resources/poll_answer_translation.py b/met-api/src/met_api/resources/poll_answer_translation.py new file mode 100644 index 000000000..a8834c439 --- /dev/null +++ b/met-api/src/met_api/resources/poll_answer_translation.py @@ -0,0 +1,152 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +"""API endpoints for managing a PollAnswerTranslation resource.""" + +from http import HTTPStatus + +from flask import request +from flask_cors import cross_origin +from flask_restx import Namespace, Resource +from marshmallow import ValidationError + +from met_api.auth import jwt as _jwt +from met_api.exceptions.business_exception import BusinessException +from met_api.schemas import utils as schema_utils +from met_api.schemas.poll_answer_translation_schema import PollAnswerTranslationSchema +from met_api.services.poll_answer_translation_service import PollAnswerTranslationService +from met_api.utils.util import allowedorigins, cors_preflight + + +API = Namespace( + 'poll_answer_translations', + description='Endpoints for PollAnswerTranslation Management', +) + + +@cors_preflight('GET, POST, PATCH, DELETE, OPTIONS') +@API.route('/') +class PollAnswerTranslationResource(Resource): + """Resource for managing poll answer translations.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(poll_answer_translation_id, **_): + """Fetch a poll answer translation by id.""" + try: + poll_answer_translation = PollAnswerTranslationService.get_by_id( + poll_answer_translation_id + ) + return ( + PollAnswerTranslationSchema().dump(poll_answer_translation), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def patch(poll_id, poll_answer_translation_id): + """Update saved poll answer translation partially.""" + try: + request_json = request.get_json() + poll_answer_translation = ( + PollAnswerTranslationService.update_poll_answer_translation( + poll_id, poll_answer_translation_id, request_json + ) + ) + return ( + PollAnswerTranslationSchema().dump(poll_answer_translation), + HTTPStatus.OK, + ) + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def delete(poll_id, poll_answer_translation_id): + """Delete a poll answer translation.""" + try: + success = ( + PollAnswerTranslationService.delete_poll_answer_translation( + poll_id, poll_answer_translation_id + ) + ) + if success: + return ( + 'Successfully deleted poll answer translation', + HTTPStatus.NO_CONTENT, + ) + raise 
ValueError('Poll answer translation not found') + except KeyError as err: + return str(err), HTTPStatus.BAD_REQUEST + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + + +@cors_preflight('GET, OPTIONS') +@API.route('/answer//language/') +class PollAnswerTranslationResourceByLanguage(Resource): + """Resource for managing poll answer using language_id.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(poll_answer_id, language_id, **_): + """Fetch a poll answer translation by language_id.""" + try: + poll_answer_translations = ( + PollAnswerTranslationService.get_poll_answer_translation( + poll_answer_id, language_id + ) + ) + return ( + PollAnswerTranslationSchema().dump( + poll_answer_translations, many=True + ), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + +@cors_preflight('POST, OPTIONS') +@API.route('/') +class PollAnswerTranslations(Resource): + """Resource for managing multiple poll answer translations.""" + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def post(poll_id): + """Create a new poll answer translation.""" + try: + request_json = request.get_json() + valid_format, errors = schema_utils.validate( + request_json, 'poll_answer_translation' + ) + if not valid_format: + return { + 'message': schema_utils.serialize(errors) + }, HTTPStatus.BAD_REQUEST + + pre_populate = request_json.get('pre_populate', True) + + poll_answer_translation = ( + PollAnswerTranslationService.create_poll_answer_translation( + poll_id, request_json, pre_populate + ) + ) + return ( + PollAnswerTranslationSchema().dump(poll_answer_translation), + HTTPStatus.CREATED, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + except BusinessException as err: + return err.error, err.status_code diff --git a/met-api/src/met_api/resources/subscribe_item_translation.py b/met-api/src/met_api/resources/subscribe_item_translation.py new file mode 100644 index 000000000..7e6a9b08f --- /dev/null +++ b/met-api/src/met_api/resources/subscribe_item_translation.py @@ -0,0 +1,152 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +"""API endpoints for managing a SubscribeItemTranslation resource.""" + +from http import HTTPStatus + +from flask import request +from flask_cors import cross_origin +from flask_restx import Namespace, Resource +from marshmallow import ValidationError + +from met_api.auth import jwt as _jwt +from met_api.exceptions.business_exception import BusinessException +from met_api.schemas import utils as schema_utils +from met_api.schemas.subscribe_item_translation_schema import SubscribeItemTranslationSchema +from met_api.services.subscribe_item_translation_service import SubscribeItemTranslationService +from met_api.utils.util import allowedorigins, cors_preflight + + +API = Namespace( + 'subscribe_item_translations', + description='Endpoints for SubscribeItemTranslation Management', +) + + +@cors_preflight('GET, POST, PATCH, DELETE, OPTIONS') +@API.route('/') +class SubscribeItemTranslationResource(Resource): + """Resource for managing subscribe item translations.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(subscribe_item_translation_id, **_): + """Fetch a subscribe item translation by id.""" + try: + subscribe_item_translation = ( + 
SubscribeItemTranslationService.get_by_id( + subscribe_item_translation_id + ) + ) + return ( + SubscribeItemTranslationSchema().dump(subscribe_item_translation), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def patch(widget_subscribe_id, subscribe_item_translation_id): + """Update saved subscribe item translation partially.""" + try: + request_json = request.get_json() + subscribe_item_translation = ( + SubscribeItemTranslationService.update_subscribe_item_translation( + widget_subscribe_id, subscribe_item_translation_id, request_json + ) + ) + return ( + SubscribeItemTranslationSchema().dump(subscribe_item_translation), + HTTPStatus.OK, + ) + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def delete(widget_subscribe_id, subscribe_item_translation_id): + """Delete a subscribe item translation.""" + try: + success = SubscribeItemTranslationService.delete_subscribe_item_translation( + widget_subscribe_id, subscribe_item_translation_id + ) + if success: + return ( + 'Successfully deleted subscribe item translation', + HTTPStatus.NO_CONTENT, + ) + raise ValueError('Subscribe item translation not found') + except KeyError as err: + return str(err), HTTPStatus.BAD_REQUEST + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + + +@cors_preflight('GET, OPTIONS') +@API.route('/item//language/') +class SubscribeItemTranslationResourceByLanguage(Resource): + """Resource for subscribe item using language_id.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(subscribe_item_id, language_id, **_): + """Fetch a subscribe item translation by language_id.""" + try: + subscribe_item_translation = ( + SubscribeItemTranslationService.get_subscribe_item_translation( + subscribe_item_id, language_id + ) + ) + return ( + SubscribeItemTranslationSchema().dump( + subscribe_item_translation[0], many=False + ), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + +@cors_preflight('POST, OPTIONS') +@API.route('/') +class SubscribeItemTranslations(Resource): + """Resource for managing multiple subscribe item translations.""" + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def post(widget_subscribe_id): + """Create a new subscribe item translation.""" + try: + request_json = request.get_json() + valid_format, errors = schema_utils.validate( + request_json, 'subscribe_item_translation' + ) + if not valid_format: + return { + 'message': schema_utils.serialize(errors) + }, HTTPStatus.BAD_REQUEST + # pre-populate is to indicate whether to pre-populate the translation with data in base language + pre_populate = request_json.get('pre_populate', True) + + subscribe_item_translation = ( + SubscribeItemTranslationService.create_subscribe_item_translation( + widget_subscribe_id, request_json, pre_populate + ) + ) + return ( + SubscribeItemTranslationSchema().dump(subscribe_item_translation), + HTTPStatus.CREATED, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + except BusinessException as err: + return err.error, err.status_code 
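The event item, poll answer and subscribe item resources above (and the timeline resource that follows) expose the same REST surface for translations. A rough client-side sketch, modelled on the event item paths exercised by the unit tests included in this change (/api/events/.../translations); the base URL, bearer token and IDs are placeholders, not values defined by the patch:

import requests

BASE_URL = 'http://localhost:5000/api'  # placeholder host for a local met-api
HEADERS = {'Authorization': 'Bearer <staff-jwt>'}  # POST/PATCH/DELETE require auth

# Create a translation; pre_populate=True copies the base-language content
# from the event item, so only the IDs are strictly required in the payload.
created = requests.post(
    f'{BASE_URL}/events/1/translations/',
    json={'event_item_id': 10, 'language_id': 2, 'pre_populate': True},
    headers=HEADERS,
    timeout=30,
).json()

# Reads do not require a token; fetch the translation back by item and language.
fetched = requests.get(
    f'{BASE_URL}/events/1/translations/item/10/language/2',
    timeout=30,
).json()
print(created['id'], fetched['description'])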
diff --git a/met-api/src/met_api/resources/timeline_event_translation.py b/met-api/src/met_api/resources/timeline_event_translation.py new file mode 100644 index 000000000..f27afc1ed --- /dev/null +++ b/met-api/src/met_api/resources/timeline_event_translation.py @@ -0,0 +1,153 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +"""API endpoints for managing a TimelineEventTranslation resource.""" + +from http import HTTPStatus + +from flask import request +from flask_cors import cross_origin +from flask_restx import Namespace, Resource +from marshmallow import ValidationError + +from met_api.auth import jwt as _jwt +from met_api.exceptions.business_exception import BusinessException +from met_api.schemas import utils as schema_utils +from met_api.schemas.timeline_event_translation_schema import TimelineEventTranslationSchema +from met_api.services.timeline_event_translation_service import TimelineEventTranslationService +from met_api.utils.util import allowedorigins, cors_preflight + + +API = Namespace( + 'timeline_event_translations', + description='Endpoints for TimelineEventTranslation Management', +) + + +@cors_preflight('GET, POST, PATCH, DELETE, OPTIONS') +@API.route('/') +class TimelineEventTranslationResource(Resource): + """Resource for managing timeline event translations.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(timeline_event_translation_id, **_): + """Fetch a timeline event translation by id.""" + try: + timeline_event_translation = ( + TimelineEventTranslationService.get_by_id( + timeline_event_translation_id + ) + ) + return ( + TimelineEventTranslationSchema().dump(timeline_event_translation), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def patch(timeline_id, timeline_event_translation_id): + """Update saved timeline event translation partially.""" + try: + request_json = request.get_json() + timeline_event_translation = ( + TimelineEventTranslationService.update_timeline_event_translation( + timeline_id, timeline_event_translation_id, request_json + ) + ) + return ( + TimelineEventTranslationSchema().dump(timeline_event_translation), + HTTPStatus.OK, + ) + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def delete(timeline_id, timeline_event_translation_id): + """Delete a timeline event translation.""" + try: + success = TimelineEventTranslationService.delete_timeline_event_translation( + timeline_id, timeline_event_translation_id + ) + if success: + return ( + 'Successfully deleted timeline event translation', + HTTPStatus.NO_CONTENT, + ) + raise ValueError('Timeline event translation not found') + except KeyError as err: + return str(err), HTTPStatus.BAD_REQUEST + except ValueError as err: + return str(err), HTTPStatus.NOT_FOUND + + +@cors_preflight('GET, OPTIONS') +@API.route('/event//language/') +class TimelineEventTranslationResourceByLanguage(Resource): + """Resource for timeline event using language_id.""" + + @staticmethod + @cross_origin(origins=allowedorigins()) + def get(timeline_event_id, language_id, **_): + """Fetch a timeline event translation by language_id.""" + try: + timeline_event_translation = ( + 
TimelineEventTranslationService.get_timeline_event_translation( + timeline_event_id, language_id + ) + ) + return ( + TimelineEventTranslationSchema().dump( + timeline_event_translation[0], many=False + ), + HTTPStatus.OK, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + + +@cors_preflight('POST, OPTIONS') +@API.route('/') +class TimelineEventTranslations(Resource): + """Resource for managing multiple timeline event translations.""" + + @staticmethod + @_jwt.requires_auth + @cross_origin(origins=allowedorigins()) + def post(timeline_id): + """Create a new timeline event translation.""" + try: + request_json = request.get_json() + valid_format, errors = schema_utils.validate( + request_json, 'timeline_event_translation' + ) + if not valid_format: + return { + 'message': schema_utils.serialize(errors) + }, HTTPStatus.BAD_REQUEST + + # pre-populate is to indicate whether to pre-populate the translation with data in base language + pre_populate = request_json.get('pre_populate', True) + + timeline_event_translation = ( + TimelineEventTranslationService.create_timeline_event_translation( + timeline_id, request_json, pre_populate + ) + ) + return ( + TimelineEventTranslationSchema().dump(timeline_event_translation), + HTTPStatus.CREATED, + ) + except (KeyError, ValueError) as err: + return str(err), HTTPStatus.INTERNAL_SERVER_ERROR + except ValidationError as err: + return str(err.messages), HTTPStatus.BAD_REQUEST + except BusinessException as err: + return err.error, err.status_code diff --git a/met-api/src/met_api/schemas/event_item_translation_schema.py b/met-api/src/met_api/schemas/event_item_translation_schema.py new file mode 100644 index 000000000..c38adb695 --- /dev/null +++ b/met-api/src/met_api/schemas/event_item_translation_schema.py @@ -0,0 +1,25 @@ +"""Schema for EventItemTranslation serialization and deserialization.""" + +from marshmallow import EXCLUDE, fields +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from met_api.models.event_item_translation import EventItemTranslation + + +class EventItemTranslationSchema(SQLAlchemyAutoSchema): + """Schema for EventItemTranslation.""" + + class Meta: + """EventItemTranslationSchema metadata.""" + + model = EventItemTranslation + unknown = EXCLUDE + + id = fields.Int(dump_only=True) + language_id = fields.Int(required=True) + event_item_id = fields.Int(required=True) + description = fields.Str(allow_none=True) + location_name = fields.Str(allow_none=True) + location_address = fields.Str(allow_none=True) + url = fields.Str(allow_none=True) + url_label = fields.Str(allow_none=True) diff --git a/met-api/src/met_api/schemas/poll_answer_translation_schema.py b/met-api/src/met_api/schemas/poll_answer_translation_schema.py new file mode 100644 index 000000000..6975d4c38 --- /dev/null +++ b/met-api/src/met_api/schemas/poll_answer_translation_schema.py @@ -0,0 +1,21 @@ +"""Schema for PollAnswerTranslation serialization and deserialization.""" + +from marshmallow import EXCLUDE, fields +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from met_api.models.poll_answer_translation import PollAnswerTranslation + + +class PollAnswerTranslationSchema(SQLAlchemyAutoSchema): + """Schema for PollAnswerTranslation.""" + + class Meta: + """PollAnswerTranslationSchema metadata.""" + + model = PollAnswerTranslation + unknown = EXCLUDE + + id = fields.Int(dump_only=True) + poll_answer_id = fields.Int(required=True) + language_id = fields.Int(required=True) + answer_text = 
fields.Str(required=True) diff --git a/met-api/src/met_api/schemas/schemas/event_item_translation.json b/met-api/src/met_api/schemas/schemas/event_item_translation.json new file mode 100644 index 000000000..f88ef0879 --- /dev/null +++ b/met-api/src/met_api/schemas/schemas/event_item_translation.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://met.gov.bc.ca/.well_known/schemas/event_item_translation", + "type": "object", + "title": "The EventItemTranslation Schema", + "description": "Schema for EventItemTranslation POST request validation.", + "default": {}, + "examples": [ + { + "language_id": 2, + "event_item_id": 1, + "description": "Description in Spanish", + "location_name": "Location Name in Spanish", + "location_address": "Location Address in Spanish", + "url": "https://example.com/event", + "url_label": "Event Details", + "pre_populate": false + }, + { + "language_id": 2, + "event_item_id": 1, + "pre_populate": true + } + ], + "required": ["language_id", "event_item_id"], + "properties": { + "language_id": { + "$id": "#/properties/language_id", + "type": "integer", + "title": "Language ID", + "description": "The ID of the language in which the event item is translated." + }, + "event_item_id": { + "$id": "#/properties/event_item_id", + "type": "integer", + "title": "Event Item ID", + "description": "The ID of the event item being translated." + }, + "description": { + "$id": "#/properties/description", + "type": "string", + "title": "Description", + "description": "The translated description of the event item.", + "maxLength": 500 + }, + "location_name": { + "$id": "#/properties/location_name", + "type": "string", + "title": "Location Name", + "description": "The name of the event location.", + "maxLength": 50 + }, + "location_address": { + "$id": "#/properties/location_address", + "type": "string", + "title": "Location Address", + "description": "The address of the event location.", + "maxLength": 100 + }, + "url": { + "$id": "#/properties/url", + "type": "string", + "title": "URL", + "description": "The URL related to the event item.", + "maxLength": 500 + }, + "url_label": { + "$id": "#/properties/url_label", + "type": "string", + "title": "URL Label", + "description": "Label to display for URL links.", + "maxLength": 100 + } + } +} diff --git a/met-api/src/met_api/schemas/schemas/poll_answer_translation.json b/met-api/src/met_api/schemas/schemas/poll_answer_translation.json new file mode 100644 index 000000000..d25ff68c0 --- /dev/null +++ b/met-api/src/met_api/schemas/schemas/poll_answer_translation.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://met.gov.bc.ca/.well_known/schemas/poll_answer_translation", + "type": "object", + "title": "The PollAnswerTranslation Schema", + "description": "Schema for PollAnswerTranslation POST request validation.", + "default": {}, + "examples": [ + { + "poll_answer_id": 1, + "language_id": 2, + "answer_text": "Answer in Spanish", + "pre_populate": false + }, + { + "poll_answer_id": 1, + "language_id": 2, + "pre_populate": true + } + ], + "required": ["poll_answer_id", "language_id", "answer_text"], + "properties": { + "poll_answer_id": { + "$id": "#/properties/poll_answer_id", + "type": "integer", + "title": "Poll Answer ID", + "description": "The ID of the poll answer being translated." 
+ }, + "language_id": { + "$id": "#/properties/language_id", + "type": "integer", + "title": "Language ID", + "description": "The ID of the language in which the poll answer is translated." + }, + "answer_text": { + "$id": "#/properties/answer_text", + "type": "string", + "title": "Answer Text", + "description": "The translated text of the poll answer.", + "maxLength": 255, + "examples": ["Answer in Spanish"] + } + } +} diff --git a/met-api/src/met_api/schemas/schemas/subscribe_item_translation.json b/met-api/src/met_api/schemas/schemas/subscribe_item_translation.json new file mode 100644 index 000000000..4e99f8537 --- /dev/null +++ b/met-api/src/met_api/schemas/schemas/subscribe_item_translation.json @@ -0,0 +1,58 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://met.gov.bc.ca/.well_known/schemas/subscribe_item_translation", + "type": "object", + "title": "The SubscribeItemTranslation Schema", + "description": "Schema for SubscribeItemTranslation POST request validation.", + "default": {}, + "examples": [ + { + "language_id": 2, + "subscribe_item_id": 1, + "description": "Subscription description in Spanish", + "rich_description": "Rich text subscription description in Spanish", + "call_to_action_text": "Subscribe Now", + "pre_populate" : false + }, + { + "language_id": 2, + "subscribe_item_id": 1, + "pre_populate" : true + } + ], + "required": ["language_id", "subscribe_item_id"], + "properties": { + "language_id": { + "$id": "#/properties/language_id", + "type": "integer", + "title": "Language ID", + "description": "The ID of the language in which the subscribe item is translated." + }, + "subscribe_item_id": { + "$id": "#/properties/subscribe_item_id", + "type": "integer", + "title": "Subscribe Item ID", + "description": "The ID of the subscribe item being translated." + }, + "description": { + "$id": "#/properties/description", + "type": "string", + "title": "Description", + "description": "The translated description of the subscribe item.", + "maxLength": 500 + }, + "rich_description": { + "$id": "#/properties/rich_description", + "type": "string", + "title": "Rich Description", + "description": "A more detailed and formatted translation of the subscribe item." 
+ }, + "call_to_action_text": { + "$id": "#/properties/call_to_action_text", + "type": "string", + "title": "Call to Action Text", + "description": "Text for the call to action button or link.", + "maxLength": 25 + } + } +} diff --git a/met-api/src/met_api/schemas/schemas/timeline_event_translation.json b/met-api/src/met_api/schemas/schemas/timeline_event_translation.json new file mode 100644 index 000000000..317f63dd7 --- /dev/null +++ b/met-api/src/met_api/schemas/schemas/timeline_event_translation.json @@ -0,0 +1,50 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://met.gov.bc.ca/.well_known/schemas/timeline_event_translation", + "type": "object", + "title": "The TimelineEventTranslation Schema", + "description": "Schema for TimelineEventTranslation POST request validation.", + "default": {}, + "examples": [ + { + "language_id": 2, + "timeline_event_id": 1, + "description": "Detailed description of the timeline event in the specified language.", + "time": "2024-03-13T15:00:00", + "pre_populate": false + }, + { + "language_id": 2, + "timeline_event_id": 1, + "pre_populate": true + } + ], + "required": ["language_id", "timeline_event_id"], + "properties": { + "language_id": { + "$id": "#/properties/language_id", + "type": "integer", + "title": "Language ID", + "description": "The ID of the language in which the timeline event is translated." + }, + "timeline_event_id": { + "$id": "#/properties/timeline_event_id", + "type": "integer", + "title": "Timeline Event ID", + "description": "The ID of the timeline event being translated." + }, + "description": { + "$id": "#/properties/description", + "type": "string", + "title": "Description", + "description": "The translated description of the timeline event." + }, + "time": { + "$id": "#/properties/time", + "type": "string", + "title": "Time", + "description": "The time associated with the timeline event.", + "pattern": "^(\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2})$" + } + } +} diff --git a/met-api/src/met_api/schemas/subscribe_item_translation_schema.py b/met-api/src/met_api/schemas/subscribe_item_translation_schema.py new file mode 100644 index 000000000..fabcc5896 --- /dev/null +++ b/met-api/src/met_api/schemas/subscribe_item_translation_schema.py @@ -0,0 +1,23 @@ +"""Schema for SubscribeItemTranslation serialization and deserialization.""" + +from marshmallow import fields +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from met_api.models.subscribe_item_translation import SubscribeItemTranslation + + +class SubscribeItemTranslationSchema(SQLAlchemyAutoSchema): + """Schema for SubscribeItemTranslation.""" + + class Meta: + """SubscribeItemTranslationSchema metadata.""" + + model = SubscribeItemTranslation + load_instance = True # Optional: deserialize to model instances + + id = fields.Int(dump_only=True) + language_id = fields.Int(required=True) + subscribe_item_id = fields.Int(required=True) + description = fields.Str(allow_none=True) + rich_description = fields.Str(allow_none=True) + call_to_action_text = fields.Str(allow_none=True, validate=lambda s: len(s) <= 25) diff --git a/met-api/src/met_api/schemas/timeline_event_translation_schema.py b/met-api/src/met_api/schemas/timeline_event_translation_schema.py new file mode 100644 index 000000000..dc96bef49 --- /dev/null +++ b/met-api/src/met_api/schemas/timeline_event_translation_schema.py @@ -0,0 +1,22 @@ +"""Schema for TimelineEventTranslation serialization and deserialization.""" + +from marshmallow import EXCLUDE, fields +from 
marshmallow_sqlalchemy import SQLAlchemyAutoSchema
+
+from met_api.models.timeline_event_translation import TimelineEventTranslation
+
+
+class TimelineEventTranslationSchema(SQLAlchemyAutoSchema):
+    """Schema for TimelineEventTranslation."""
+
+    class Meta:
+        """TimelineEventTranslationSchema metadata."""
+
+        model = TimelineEventTranslation
+        unknown = EXCLUDE
+
+    id = fields.Int(dump_only=True)
+    language_id = fields.Int(required=True)
+    timeline_event_id = fields.Int(required=True)
+    description = fields.Str(allow_none=True)
+    time = fields.Str(allow_none=True)
diff --git a/met-api/src/met_api/services/event_item_translation_service.py b/met-api/src/met_api/services/event_item_translation_service.py
new file mode 100644
index 000000000..e561374d7
--- /dev/null
+++ b/met-api/src/met_api/services/event_item_translation_service.py
@@ -0,0 +1,136 @@
+"""Service for EventItemTranslation management with authorization checks."""
+
+from http import HTTPStatus
+
+from sqlalchemy.exc import SQLAlchemyError
+
+from met_api.constants.membership_type import MembershipType
+from met_api.exceptions.business_exception import BusinessException
+from met_api.models.event_item_translation import EventItemTranslation as EventItemTranslationModel
+from met_api.services import authorization
+from met_api.services.widget_events_service import WidgetEventsService
+from met_api.services.widget_service import WidgetService
+from met_api.utils.roles import Role
+
+
+class EventItemTranslationService:
+    """EventItemTranslation management service."""
+
+    @staticmethod
+    def get_by_id(translation_id: int):
+        """Get event item translation by ID."""
+        return EventItemTranslationModel.find_by_id(translation_id)
+
+    @staticmethod
+    def get_engagement_id(event_id):
+        """Get engagement id using event id."""
+        widget_event = WidgetEventsService.get_by_id(event_id)
+        if not widget_event:
+            raise BusinessException(
+                status_code=HTTPStatus.NOT_FOUND,
+                error='Event not found',
+            )
+        widget = WidgetService.get_widget_by_id(widget_event.widget_id)
+        return widget.engagement_id
+
+    @staticmethod
+    def get_event_item_translation(event_item_id=None, language_id=None):
+        """Get event item translations by item ID and language ID."""
+        return EventItemTranslationModel.get_by_item_and_language(
+            event_item_id, language_id
+        )
+
+    @staticmethod
+    def create_event_item_translation(event_id: int, data: dict, pre_populate: bool = True):
+        """Insert a new EventItemTranslation with authorization check."""
+        try:
+            one_of_roles = (
+                MembershipType.TEAM_MEMBER.name,
+                Role.EDIT_ENGAGEMENT.value,
+            )
+            engagement_id = EventItemTranslationService.get_engagement_id(
+                event_id
+            )
+            authorization.check_auth(
+                one_of_roles=one_of_roles, engagement_id=engagement_id
+            )
+            # Pre-populating with event item base language data
+            if pre_populate:
+                event_item = WidgetEventsService.get_event_item_by_id(data['event_item_id'])
+                if not event_item:
+                    raise BusinessException(
+                        'EventItem not found', HTTPStatus.NOT_FOUND
+                    )
+                data['description'] = event_item.description
+                data['location_name'] = event_item.location_name
+                data['location_address'] = event_item.location_address
+                data['url'] = event_item.url
+                data['url_label'] = event_item.url_label
+
+            return EventItemTranslationModel.create_event_item_translation(
+                data
+            )
+        except SQLAlchemyError as e:
+            raise BusinessException(
+                str(e), HTTPStatus.INTERNAL_SERVER_ERROR
+            ) from e
+
+    @staticmethod
+    def update_event_item_translation(event_id: int, translation_id: int, data: dict):
+        """Update an
existing EventItemTranslation with authorization check.""" + try: + event_item_translation = EventItemTranslationModel.find_by_id(translation_id) + if not event_item_translation: + raise BusinessException( + 'EventItemTranslation not found', HTTPStatus.NOT_FOUND + ) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = EventItemTranslationService.get_engagement_id( + event_id + ) + authorization.check_auth( + one_of_roles=one_of_roles, engagement_id=engagement_id + ) + + updated_translation = ( + EventItemTranslationModel.update_event_item_translation( + translation_id, data + ) + ) + return updated_translation + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e + + @staticmethod + def delete_event_item_translation(event_id: int, translation_id: int): + """Delete an EventItemTranslation with authorization check.""" + try: + event_item_translation = EventItemTranslationModel.find_by_id(translation_id) + if not event_item_translation: + raise BusinessException( + 'EventItemTranslation not found', HTTPStatus.NOT_FOUND + ) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = EventItemTranslationService.get_engagement_id( + event_id + ) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=engagement_id, + ) + + return EventItemTranslationModel.delete_event_item_translation(translation_id) + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e diff --git a/met-api/src/met_api/services/poll_answer_translation_service.py b/met-api/src/met_api/services/poll_answer_translation_service.py new file mode 100644 index 000000000..3809a9603 --- /dev/null +++ b/met-api/src/met_api/services/poll_answer_translation_service.py @@ -0,0 +1,123 @@ +"""Service for PollAnswerTranslation management with authorization checks.""" + +from http import HTTPStatus + +from sqlalchemy.exc import SQLAlchemyError + +from met_api.constants.membership_type import MembershipType +from met_api.exceptions.business_exception import BusinessException +from met_api.models.poll_answer_translation import PollAnswerTranslation as PollAnswerTranslationModel +from met_api.services import authorization +from met_api.services.poll_answers_service import PollAnswerService +from met_api.services.widget_poll_service import WidgetPollService +from met_api.utils.roles import Role + + +class PollAnswerTranslationService: + """PollAnswerTranslation management service.""" + + @staticmethod + def get_by_id(translation_id: int): + """Get poll answer translations by id.""" + return PollAnswerTranslationModel.find_by_id(translation_id) + + @staticmethod + def get_poll_answer_translation(poll_answer_id: int = None, language_id: int = None): + """Get poll answer translations by answer ID and language ID.""" + return PollAnswerTranslationModel.get_by_answer_and_language( + poll_answer_id, language_id + ) + + @staticmethod + def create_poll_answer_translation( + poll_id: int, data: dict, pre_populate: bool = True + ): + """Insert a new PollAnswerTranslation with authorization check.""" + try: + poll = WidgetPollService.get_poll_by_id(poll_id) + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=poll.engagement_id, + ) + + if pre_populate: + poll_answer = PollAnswerService.get_poll_answer_by_id( + 
data['poll_answer_id'] + ) + if not poll_answer: + raise BusinessException( + 'PollAnswer not found', HTTPStatus.NOT_FOUND + ) + # prepopulate translation with base language data + data['answer_text'] = poll_answer.answer_text + + return PollAnswerTranslationModel.create_poll_answer_translation( + data + ) + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e + + @staticmethod + def update_poll_answer_translation(poll_id: int, translation_id: int, data: dict): + """Update an existing PollAnswerTranslation with authorization check.""" + try: + poll_answer_translation = PollAnswerTranslationModel.find_by_id(translation_id) + if not poll_answer_translation: + raise BusinessException( + 'PollAnswerTranslation not found', HTTPStatus.NOT_FOUND + ) + + poll = WidgetPollService.get_poll_by_id(poll_id) + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=poll.engagement_id, + ) + + updated_translation = ( + PollAnswerTranslationModel.update_poll_answer_translation( + translation_id, data + ) + ) + return updated_translation + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e + + @staticmethod + def delete_poll_answer_translation(poll_id: int, translation_id: int): + """Delete a PollAnswerTranslation with authorization check.""" + try: + poll_answer_translation = PollAnswerTranslationModel.find_by_id(translation_id) + if not poll_answer_translation: + raise ValueError( + 'PollAnswerTranslation not found', HTTPStatus.NOT_FOUND + ) + + poll = WidgetPollService.get_poll_by_id(poll_id) + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=poll.engagement_id, + ) + + return PollAnswerTranslationModel.delete_poll_answer_translation( + translation_id + ) + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e diff --git a/met-api/src/met_api/services/poll_answers_service.py b/met-api/src/met_api/services/poll_answers_service.py index 0eb0e4a1e..12e615dc6 100644 --- a/met-api/src/met_api/services/poll_answers_service.py +++ b/met-api/src/met_api/services/poll_answers_service.py @@ -2,6 +2,7 @@ from http import HTTPStatus from sqlalchemy.exc import SQLAlchemyError + from met_api.exceptions.business_exception import BusinessException from met_api.models.poll_answers import PollAnswer as PollAnswerModel @@ -9,6 +10,14 @@ class PollAnswerService: """PollAnswer management service.""" + @staticmethod + def get_poll_answer_by_id(answer_id: int): + """Get poll answer by id.""" + poll_answer = PollAnswerModel.find_by_id(answer_id) + if not poll_answer: + raise BusinessException('Poll answer not found', HTTPStatus.NOT_FOUND) + return poll_answer + @staticmethod def get_poll_answer(poll_id): """Get poll answer by poll id.""" diff --git a/met-api/src/met_api/services/subscribe_item_translation_service.py b/met-api/src/met_api/services/subscribe_item_translation_service.py new file mode 100644 index 000000000..5ed9a9948 --- /dev/null +++ b/met-api/src/met_api/services/subscribe_item_translation_service.py @@ -0,0 +1,105 @@ +"""Service for SubscribeItemTranslation management with authorization checks.""" + +from http import HTTPStatus + +from sqlalchemy.exc import SQLAlchemyError + +from met_api.constants.membership_type import 
MembershipType +from met_api.exceptions.business_exception import BusinessException +from met_api.models.subscribe_item_translation import SubscribeItemTranslation as SubscribeItemTranslationModel +from met_api.services import authorization +from met_api.services.widget_service import WidgetService +from met_api.services.widget_subscribe_service import WidgetSubscribeService +from met_api.utils.roles import Role + + +class SubscribeItemTranslationService: + """SubscribeItemTranslation management service.""" + + @staticmethod + def get_by_id(translation_id: int): + """Get subscribe item translation by ID.""" + return SubscribeItemTranslationModel.find_by_id(translation_id) + + @staticmethod + def get_subscribe_item_translation(subscribe_item_id=None, language_id=None): + """Get subscribe item translations by item ID and language ID.""" + return SubscribeItemTranslationModel.get_by_item_and_language(subscribe_item_id, language_id) + + @staticmethod + def get_engagement_id(widget_subscribe_id): + """Get engagement id widget_subscribe_id.""" + widget_subscribe = WidgetSubscribeService.get_by_id(widget_subscribe_id) + if not widget_subscribe: + raise BusinessException( + status_code=HTTPStatus.NOT_FOUND, + error='Subscribe widget not found', + ) + widget = WidgetService.get_widget_by_id(widget_subscribe.widget_id) + return widget.engagement_id + + @staticmethod + def create_subscribe_item_translation(widget_subscribe_id: int, data: dict, pre_populate: bool = True): + """Insert a new SubscribeItemTranslation with authorization check.""" + try: + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = SubscribeItemTranslationService.get_engagement_id(widget_subscribe_id) + authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) + + # Pre populating with Subscribe item base langauge data + if pre_populate: + subscribe_item = WidgetSubscribeService.get_subscribe_item_by_id(data['subscribe_item_id']) + if not subscribe_item: + raise BusinessException('Subscribe item not found', HTTPStatus.NOT_FOUND) + data['description'] = subscribe_item.description + data['rich_description'] = subscribe_item.rich_description + data['call_to_action_text'] = subscribe_item.call_to_action_text + + return SubscribeItemTranslationModel.create_sub_item_translation(data) + except SQLAlchemyError as e: + raise BusinessException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR) from e + + @staticmethod + def update_subscribe_item_translation(widget_subscribe_id: int, translation_id: int, data: dict): + """Update an existing SubscribeItemTranslation with authorization check.""" + try: + subscribe_item_translation = SubscribeItemTranslationModel.find_by_id(translation_id) + if not subscribe_item_translation: + raise BusinessException('SubscribeItemTranslation not found', HTTPStatus.NOT_FOUND) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = SubscribeItemTranslationService.get_engagement_id(widget_subscribe_id) + authorization.check_auth(one_of_roles=one_of_roles, engagement_id=engagement_id) + + updated_translation = SubscribeItemTranslationModel.update_sub_item_translation(translation_id, data) + return updated_translation + except SQLAlchemyError as e: + raise BusinessException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR) from e + + @staticmethod + def delete_subscribe_item_translation(widget_subscribe_id: int, translation_id: int): + """Delete a SubscribeItemTranslation with authorization check.""" + 
try: + subscribe_item_translation = SubscribeItemTranslationModel.find_by_id(translation_id) + if not subscribe_item_translation: + raise BusinessException('SubscribeItemTranslation not found', HTTPStatus.NOT_FOUND) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = SubscribeItemTranslationService.get_engagement_id(widget_subscribe_id) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=engagement_id, + ) + + return SubscribeItemTranslationModel.delete_sub_item_translation(translation_id) + except SQLAlchemyError as e: + raise BusinessException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR) from e diff --git a/met-api/src/met_api/services/timeline_event_service.py b/met-api/src/met_api/services/timeline_event_service.py index 1f9cf87a5..82d4d3d9e 100644 --- a/met-api/src/met_api/services/timeline_event_service.py +++ b/met-api/src/met_api/services/timeline_event_service.py @@ -9,9 +9,9 @@ class TimelineEventService: """Timeline event management service.""" @staticmethod - def get_timeline_event(timeline_id): - """Get timeline event by timeline id.""" - timeline_event = TimelineEventModel.get_event(timeline_id) + def get_timeline_event(timeline_event_id): + """Get timeline event by timeline event id.""" + timeline_event = TimelineEventModel.find_by_id(timeline_event_id) return timeline_event @staticmethod diff --git a/met-api/src/met_api/services/timeline_event_translation_service.py b/met-api/src/met_api/services/timeline_event_translation_service.py new file mode 100644 index 000000000..0b2c1a370 --- /dev/null +++ b/met-api/src/met_api/services/timeline_event_translation_service.py @@ -0,0 +1,132 @@ +"""Service for TimelineEventTranslation management with authorization checks.""" + +from http import HTTPStatus + +from sqlalchemy.exc import SQLAlchemyError + +from met_api.constants.membership_type import MembershipType +from met_api.exceptions.business_exception import BusinessException +from met_api.models.timeline_event_translation import TimelineEventTranslation as TimelineEventTranslationModel +from met_api.services import authorization +from met_api.services.timeline_event_service import TimelineEventService +from met_api.services.widget_timeline_service import WidgetTimelineService +from met_api.utils.roles import Role + + +class TimelineEventTranslationService: + """TimelineEventTranslation management service.""" + + @staticmethod + def get_by_id(translation_id: int): + """Get timeline event translation by ID.""" + return TimelineEventTranslationModel.find_by_id(translation_id) + + @staticmethod + def get_engagement_id(timeline_id): + """Get engagement id using timeline event id.""" + timeline = WidgetTimelineService.get_timeline_by_id(timeline_id) + if not timeline: + raise BusinessException( + status_code=HTTPStatus.NOT_FOUND, + error='Timeline not found', + ) + return timeline.engagement_id + + @staticmethod + def get_timeline_event_translation(timeline_event_id=None, language_id=None): + """Get timeline event translations by timeline event ID and language ID.""" + return TimelineEventTranslationModel.get_by_event_and_language( + timeline_event_id, language_id + ) + + @staticmethod + def create_timeline_event_translation(timeline_id: int, data: dict, pre_populate: bool = True): + """Insert a new TimelineEventTranslation with authorization check.""" + try: + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = 
TimelineEventTranslationService.get_engagement_id( + timeline_id + ) + authorization.check_auth( + one_of_roles=one_of_roles, engagement_id=engagement_id + ) + # Pre populating with timeline event item base language data + if pre_populate: + timeline_event_item = TimelineEventService.get_timeline_event(data['timeline_event_id']) + if not timeline_event_item: + raise BusinessException( + 'TimelineEventItem not found', HTTPStatus.NOT_FOUND + ) + data['description'] = timeline_event_item.description + data['time'] = timeline_event_item.time + + return TimelineEventTranslationModel.create_timeline_event_translation( + data + ) + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e + + @staticmethod + def update_timeline_event_translation(timeline_id: int, translation_id: int, data: dict): + """Update an existing TimelineEventTranslation with authorization check.""" + try: + timeline_event_translation = TimelineEventTranslationModel.find_by_id(translation_id) + if not timeline_event_translation: + raise BusinessException( + 'TimelineEventTranslation not found', HTTPStatus.NOT_FOUND + ) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = TimelineEventTranslationService.get_engagement_id( + timeline_id + ) + authorization.check_auth( + one_of_roles=one_of_roles, engagement_id=engagement_id + ) + + updated_translation = ( + TimelineEventTranslationModel.update_timeline_event_translation( + translation_id, data + ) + ) + return updated_translation + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e + + @staticmethod + def delete_timeline_event_translation(timeline_id: int, translation_id: int): + """Delete a TimelineEventTranslation with authorization check.""" + try: + timeline_event_translation = TimelineEventTranslationModel.find_by_id(translation_id) + if not timeline_event_translation: + raise BusinessException( + 'TimelineEventTranslation not found', HTTPStatus.NOT_FOUND + ) + + one_of_roles = ( + MembershipType.TEAM_MEMBER.name, + Role.EDIT_ENGAGEMENT.value, + ) + engagement_id = TimelineEventTranslationService.get_engagement_id( + timeline_id + ) + authorization.check_auth( + one_of_roles=one_of_roles, + engagement_id=engagement_id, + ) + + return TimelineEventTranslationModel.delete_timeline_event_translation(translation_id) + except SQLAlchemyError as e: + raise BusinessException( + str(e), HTTPStatus.INTERNAL_SERVER_ERROR + ) from e diff --git a/met-api/src/met_api/services/widget_events_service.py b/met-api/src/met_api/services/widget_events_service.py index 315febff2..a845a9aec 100644 --- a/met-api/src/met_api/services/widget_events_service.py +++ b/met-api/src/met_api/services/widget_events_service.py @@ -10,6 +10,11 @@ class WidgetEventsService: """Widget Event management service.""" + @staticmethod + def get_by_id(event_id: int): + """Get widget event by id.""" + return WidgetEventsModel.find_by_id(event_id) + @staticmethod def get_event_by_widget_id(widget_id): """Get documents by widget id.""" @@ -26,6 +31,11 @@ def create_event(widget_id, event_details: dict): event.commit() return event + @staticmethod + def get_event_item_by_id(event_item_id: int): + """Get event item by id.""" + return EventItemsModel.find_by_id(event_item_id) + @staticmethod def create_event_items(widget_id, event_id, event_item_details): """Create events for the widget.""" diff --git 
a/met-api/src/met_api/services/widget_subscribe_service.py b/met-api/src/met_api/services/widget_subscribe_service.py index 3e0544920..c4ef9a24e 100644 --- a/met-api/src/met_api/services/widget_subscribe_service.py +++ b/met-api/src/met_api/services/widget_subscribe_service.py @@ -10,6 +10,11 @@ class WidgetSubscribeService: """Widget Subscribe management service.""" + @staticmethod + def get_by_id(subscribe_id: int): + """Get subscribe by ID.""" + return WidgetSubscribeModel.find_by_id(subscribe_id) + @staticmethod def get_subscribe_by_widget_id(widget_id): """Get subscribe forms by widget id.""" @@ -41,6 +46,11 @@ def create_subscribe(widget_id, subscribe_details: dict): subscribe.commit() return subscribe + @staticmethod + def get_subscribe_item_by_id(subscribe_item_id: int): + """Get subscribe item by id.""" + return SubscribeItemsModel.find_by_id(subscribe_item_id) + @staticmethod def create_subscribe_items(widget_id, subscribe_id, subscribe_item_details): """Get subscribe form item.""" diff --git a/met-api/src/met_api/services/widget_timeline_service.py b/met-api/src/met_api/services/widget_timeline_service.py index 73aafbd3b..b3184cbcf 100644 --- a/met-api/src/met_api/services/widget_timeline_service.py +++ b/met-api/src/met_api/services/widget_timeline_service.py @@ -14,6 +14,12 @@ class WidgetTimelineService: """Widget Timeline management service.""" + @staticmethod + def get_timeline_by_id(timeline_id: int): + """Get timeline by id.""" + widget_timeline = WidgetTimelineModel.find_by_id(timeline_id) + return widget_timeline + @staticmethod def get_timeline(widget_id: int): """Get timeline by widget id.""" diff --git a/met-api/tests/unit/api/test_event_item_translation.py b/met-api/tests/unit/api/test_event_item_translation.py new file mode 100644 index 000000000..926de1ca7 --- /dev/null +++ b/met-api/tests/unit/api/test_event_item_translation.py @@ -0,0 +1,170 @@ +"""Tests to verify the EventItemTranslation API endpoints.""" +import json +from http import HTTPStatus + +from met_api.utils.enums import ContentType +from tests.utilities.factory_scenarios import TestEventItemTranslationInfo +from tests.utilities.factory_utils import ( + event_item_model_with_language, factory_auth_header, factory_event_item_translation_model) + + +def test_get_event_item_translation(client, jwt, session): + """Assert that an event item translation can be fetched by its ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + item, event, language = event_item_model_with_language() + event_item_translation = factory_event_item_translation_model( + { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': item.id, + 'language_id': language.id, + 'description': 'Test Translation', + } + ) + session.add(event_item_translation) + session.commit() + + rv = client.get( + f'/api/events/{event.id}/translations/{event_item_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == event_item_translation.id + + +def test_get_event_item_translation_by_language(client, jwt, session): + """Assert that an event item translation can be fetched by its language id.""" + headers = factory_auth_header(jwt=jwt, claims={}) + item, event, language = event_item_model_with_language() + event_item_translation = factory_event_item_translation_model( + { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': item.id, + 'language_id': language.id, + 'description': 'Test 
Translation', + } + ) + + session.commit() + + rv = client.get( + f'/api/events/{event.id}/translations/item/{item.id}/language/{language.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == event_item_translation.id + + +def test_patch_event_item_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that an event item translation can be updated using the PATCH API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, event, language = event_item_model_with_language() + event_item_translation = factory_event_item_translation_model( + { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': item.id, + 'language_id': language.id, + 'description': 'Old Translation', + } + ) + session.commit() + + data = {'description': 'Updated Translation'} + rv = client.patch( + f'/api/events/{event.id}/translations/{event_item_translation.id}', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['description'] == 'Updated Translation' + + +def test_delete_event_item_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that an event item translation can be deleted using the DELETE API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, event, language = event_item_model_with_language() + event_item_translation = factory_event_item_translation_model( + { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': item.id, + 'language_id': language.id, + 'description': 'Translation to Delete', + } + ) + session.commit() + + rv = client.delete( + f'/api/events/{event.id}/translations/{event_item_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.NO_CONTENT + + +def test_create_event_item_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that a new event item translation can be created using the POST API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, event, language = event_item_model_with_language() + + data = { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': item.id, + 'language_id': language.id, + 'description': 'New Translation', + 'pre_populate': False, + } + + rv = client.post( + f'/api/events/{event.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.CREATED + json_data = rv.json + assert json_data['description'] == 'New Translation' + + +def test_create_event_item_translation_with_pre_populate( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that a new event item translation can be created using the POST API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, event, language = event_item_model_with_language() + + data = { + 'event_item_id': item.id, + 'language_id': language.id, + 'pre_populate': True, + } + + rv = client.post( + f'/api/events/{event.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == 
HTTPStatus.CREATED + json_data = rv.json + assert json_data['description'] == item.description diff --git a/met-api/tests/unit/api/test_poll_answer_translation.py b/met-api/tests/unit/api/test_poll_answer_translation.py new file mode 100644 index 000000000..8eea38e10 --- /dev/null +++ b/met-api/tests/unit/api/test_poll_answer_translation.py @@ -0,0 +1,141 @@ +"""Tests to verify the PollAnswerTranslation API endpoints.""" + +import json +from http import HTTPStatus + +from met_api.utils.enums import ContentType +from tests.utilities.factory_utils import ( + factory_auth_header, factory_poll_answer_translation_model, poll_answer_model_with_poll_enagement) + + +def test_get_poll_answer_translation(client, jwt, session): + """Assert that a poll answer translation can be fetched by its ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + answer, poll, language = poll_answer_model_with_poll_enagement() + poll_answer_translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Test Translation', + } + ) + session.add(poll_answer_translation) + session.commit() + + rv = client.get( + f'/api/polls/{poll.id}/translations/{poll_answer_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == poll_answer_translation.id + + +def test_get_poll_answer_translation_by_langauge_id(client, jwt, session): + """Assert that a poll answer translation can be fetched by its ID and Language ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + answer, poll, language = poll_answer_model_with_poll_enagement() + poll_answer_translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Test Translation', + } + ) + session.add(poll_answer_translation) + session.commit() + + rv = client.get( + f'/api/polls/{poll.id}/translations/answer/{answer.id}/language/{language.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data[0]['id'] == poll_answer_translation.id + + +def test_create_poll_answer_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that a new poll answer translation can be created using the POST API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + answer, poll, language = poll_answer_model_with_poll_enagement() + + session.commit() + + data = { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'New Answer Translation', + 'pre_populate': False, + } + + rv = client.post( + f'/api/polls/{poll.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.CREATED + json_data = rv.json + assert json_data['answer_text'] == 'New Answer Translation' + + +def test_update_poll_answer_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that a poll answer translation can be updated using the PATCH API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + answer, poll, language = poll_answer_model_with_poll_enagement() + poll_answer_translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 
'Test Translation', + } + ) + session.commit() + + data = {'answer_text': 'Updated Answer Translation'} + rv = client.patch( + f'/api/polls/{poll.id}/translations/{poll_answer_translation.id}', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['answer_text'] == 'Updated Answer Translation' + + +def test_delete_poll_answer_translation( + client, jwt, session, setup_admin_user_and_claims +): + """Assert that a poll answer translation can be deleted using the DELETE API endpoint.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + answer, poll, language = poll_answer_model_with_poll_enagement() + poll_answer_translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Test Translation', + } + ) + session.commit() + + rv = client.delete( + f'/api/polls/{poll.id}/translations/{poll_answer_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.NO_CONTENT diff --git a/met-api/tests/unit/api/test_subscribe_item_translation.py b/met-api/tests/unit/api/test_subscribe_item_translation.py new file mode 100644 index 000000000..e75d14df0 --- /dev/null +++ b/met-api/tests/unit/api/test_subscribe_item_translation.py @@ -0,0 +1,132 @@ +"""Tests to verify the SubscribeItemTranslation API endpoints.""" +import json +from http import HTTPStatus + +from met_api.utils.enums import ContentType +from tests.utilities.factory_utils import ( + factory_auth_header, factory_subscribe_item_translation_model, subscribe_item_model_with_language) + + +def test_get_subscribe_item_translation(client, jwt, session): + """Assert that a subscribe item translation can be fetched by its ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + item, widget_subscribe, language = subscribe_item_model_with_language() + subscribe_item_translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Test Translation', + } + ) + session.add(subscribe_item_translation) + session.commit() + + rv = client.get( + f'/api/subscribe/{widget_subscribe.id}/translations/{subscribe_item_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == subscribe_item_translation.id + + +def test_get_subscribe_item_translation_by_language(client, jwt, session): + """Assert that a subscribe item translation can be fetched by its language ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + item, widget_subscribe, language = subscribe_item_model_with_language() + subscribe_item_translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Test Translation', + } + ) + + session.commit() + + rv = client.get( + f'/api/subscribe/{widget_subscribe.id}/translations/item/{item.id}/language/{language.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == subscribe_item_translation.id + + +def test_patch_subscribe_item_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a subscribe item translation can be updated using PATCH.""" + _, claims = 
setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, widget_subscribe, language = subscribe_item_model_with_language() + subscribe_item_translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Old Translation', + } + ) + session.commit() + + data = {'description': 'Updated Translation'} + rv = client.patch( + f'/api/subscribe/{widget_subscribe.id}/translations/{subscribe_item_translation.id}', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['description'] == 'Updated Translation' + + +def test_delete_subscribe_item_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a subscribe item translation can be deleted using DELETE.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, widget_subscribe, language = subscribe_item_model_with_language() + subscribe_item_translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Translation to Delete', + } + ) + session.commit() + + rv = client.delete( + f'/api/subscribe/{widget_subscribe.id}/translations/{subscribe_item_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.NO_CONTENT + + +def test_create_subscribe_item_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a new subscribe item translation can be created using POST.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + item, widget_subscribe, language = subscribe_item_model_with_language() + + data = { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'New Translation', + 'pre_populate': False, + } + + rv = client.post( + f'/api/subscribe/{widget_subscribe.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.CREATED + json_data = rv.json + assert json_data['description'] == 'New Translation' diff --git a/met-api/tests/unit/api/test_timeline_event_translation.py b/met-api/tests/unit/api/test_timeline_event_translation.py new file mode 100644 index 000000000..6439ebabc --- /dev/null +++ b/met-api/tests/unit/api/test_timeline_event_translation.py @@ -0,0 +1,166 @@ +"""Tests to verify the TimelineEventTranslation API endpoints.""" +import json +from http import HTTPStatus + +from met_api.utils.enums import ContentType +from tests.utilities.factory_utils import ( + factory_auth_header, factory_timeline_event_translation_model, timeline_event_model_with_language) + + +def test_get_timeline_event_translation(client, jwt, session): + """Assert that a timeline event translation can be fetched by its ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + timeline_event, timeline, language = timeline_event_model_with_language() + timeline_event_translation = factory_timeline_event_translation_model( + { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'Test Translation', + 'time': '2021-01-01T00:00:00', + } + ) + + session.add(timeline_event_translation) + session.commit() + + rv = client.get( + f'/api/timelines/{timeline.id}/translations/{timeline_event_translation.id}', + headers=headers, 
+ content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == timeline_event_translation.id + + +def test_get_timeline_event_translation_by_language(client, jwt, session): + """Assert that a timeline event translation can be fetched by its language ID.""" + headers = factory_auth_header(jwt=jwt, claims={}) + timeline_event, timeline, language = timeline_event_model_with_language() + timeline_event_translation = factory_timeline_event_translation_model( + { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'Test Translation', + 'time': '2021-01-01T00:00:00', + } + ) + + session.add(timeline_event_translation) + session.commit() + + rv = client.get( + f'/api/timelines/{timeline.id}/translations/event/{timeline_event.id}/language/{language.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + json_data = rv.json + assert json_data['id'] == timeline_event_translation.id + + +def test_create_timeline_event_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a new timeline event translation can be created using POST.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + timeline_event, timeline, language = timeline_event_model_with_language() + + data = { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'New Translation', + 'time': '2021-01-01T00:00:00', + 'pre_populate': False, + } + + rv = client.post( + f'/api/timelines/{timeline.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.CREATED + json_data = rv.json + assert json_data['description'] == 'New Translation' + + +def test_create_timeline_event_translation_with_prepopulate(client, jwt, session, setup_admin_user_and_claims): + """Assert that a new timeline event translation can be created using POST.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + timeline_event, timeline, language = timeline_event_model_with_language() + + data = { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'pre_populate': True, + } + + rv = client.post( + f'/api/timelines/{timeline.id}/translations/', + data=json.dumps(data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.CREATED + json_data = rv.json + assert json_data['description'] == timeline_event.description + + +def test_patch_timeline_event_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a timeline event translation can be updated using PATCH.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + timeline_event, timeline, language = timeline_event_model_with_language() + session.commit() + timeline_event_translation = factory_timeline_event_translation_model( + { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'Test Translation', + 'time': '2021-01-01T00:00:00', + } + ) + + updated_data = { + 'description': 'Updated Translation', + } + + rv = client.patch( + f'/api/timelines/{timeline.id}/translations/{timeline_event_translation.id}', + data=json.dumps(updated_data), + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.OK + 
json_data = rv.json + assert json_data['description'] == updated_data['description'] + + +def test_delete_timeline_event_translation(client, jwt, session, setup_admin_user_and_claims): + """Assert that a timeline event translation can be deleted using DELETE.""" + _, claims = setup_admin_user_and_claims + headers = factory_auth_header(jwt=jwt, claims=claims) + timeline_event, timeline, language = timeline_event_model_with_language() + timeline_event_translation = factory_timeline_event_translation_model( + { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'Test Translation', + 'time': '2021-01-01T00:00:00', + } + ) + session.commit() + + rv = client.delete( + f'/api/timelines/{timeline.id}/translations/{timeline_event_translation.id}', + headers=headers, + content_type=ContentType.JSON.value, + ) + + assert rv.status_code == HTTPStatus.NO_CONTENT diff --git a/met-api/tests/unit/models/test_event_item_translation.py b/met-api/tests/unit/models/test_event_item_translation.py new file mode 100644 index 000000000..5fcc9533e --- /dev/null +++ b/met-api/tests/unit/models/test_event_item_translation.py @@ -0,0 +1,89 @@ +"""Tests for the EventItemTranslation model. + +Test suite to ensure that the EventItemTranslation model +routines are working as expected. +""" + +from met_api.models.event_item_translation import EventItemTranslation +from tests.utilities.factory_scenarios import TestEventItemTranslationInfo +from tests.utilities.factory_utils import event_item_model_with_language, factory_event_item_translation_model + + +def test_get_event_item_translation_by_item_and_language(session): + """Translations for an event item can be fetched by item and language.""" + event_item, _, language = event_item_model_with_language() + event_item_translation = { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + + factory_event_item_translation_model(event_item_translation) + session.commit() + + translations = ( + EventItemTranslation.get_by_item_and_language( + event_item.id, language.id + ) + ) + assert len(translations) == 1 + assert ( + translations[0].description == + TestEventItemTranslationInfo.event_item_info1.value['description'] + ) + + +def test_create_event_item_translation(session): + """Assert that an event item translation can be created.""" + event_item, _, language = event_item_model_with_language() + event_item_translation = { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + + translation = EventItemTranslation.create_event_item_translation( + event_item_translation + ) + assert translation.id is not None + assert ( + translation.description == + TestEventItemTranslationInfo.event_item_info1.value['description'] + ) + + +def test_update_event_item_translation(session): + """Assert that an event item translation can be updated.""" + event_item, _, language = event_item_model_with_language() + event_item_translation = { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + + translation = factory_event_item_translation_model(event_item_translation) + + updated_data = {'description': 'Updated Description'} + EventItemTranslation.update_event_item_translation( + translation.id, updated_data + ) + updated_translation = EventItemTranslation.query.get(translation.id) + + assert updated_translation.description == 'Updated 
Description' + + +def test_delete_event_item_translation(session): + """Assert that an event item translation can be deleted.""" + event_item, _, language = event_item_model_with_language() + event_item_translation = { + **TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + + translation = factory_event_item_translation_model(event_item_translation) + + EventItemTranslation.delete_event_item_translation(translation.id) + deleted_translation = EventItemTranslation.query.get(translation.id) + + assert deleted_translation is None diff --git a/met-api/tests/unit/models/test_poll_answer_translation.py b/met-api/tests/unit/models/test_poll_answer_translation.py new file mode 100644 index 000000000..b89744c2e --- /dev/null +++ b/met-api/tests/unit/models/test_poll_answer_translation.py @@ -0,0 +1,79 @@ +"""Tests for the PollAnswerTranslation model. + +Test suite to ensure that the PollAnswerTranslation model routines are working as expected. +""" + +from met_api.models.poll_answer_translation import PollAnswerTranslation +from tests.utilities.factory_utils import factory_poll_answer_translation_model, poll_answer_model_with_poll_enagement + + +def test_get_poll_answer_translation_by_answer_and_language(session): + """Assert that translations for a poll answer can be fetched by answer and language.""" + answer, _, language = poll_answer_model_with_poll_enagement() + factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Translated Answer', + } + ) + session.commit() + + translations = PollAnswerTranslation.get_by_answer_and_language( + answer.id, language.id + ) + assert len(translations) == 1 + assert translations[0].answer_text == 'Translated Answer' + + +def test_create_poll_answer_translation(session): + """Assert that a poll answer translation can be created.""" + answer, _, language = poll_answer_model_with_poll_enagement() + translation_data = { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Réponse traduite', + } + + translation = PollAnswerTranslation.create_poll_answer_translation( + translation_data + ) + assert translation.id is not None + assert translation.answer_text == 'Réponse traduite' + + +def test_update_poll_answer_translation(session): + """Assert that a poll answer translation can be updated.""" + answer, _, language = poll_answer_model_with_poll_enagement() + translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Translated Answer', + } + ) + + updated_data = {'answer_text': 'Respuesta actualizada'} + PollAnswerTranslation.update_poll_answer_translation( + translation.id, updated_data + ) + updated_translation = PollAnswerTranslation.query.get(translation.id) + + assert updated_translation.answer_text == 'Respuesta actualizada' + + +def test_delete_poll_answer_translation(session): + """Assert that a poll answer translation can be deleted.""" + answer, _, language = poll_answer_model_with_poll_enagement() + translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Translated Answer', + } + ) + + PollAnswerTranslation.delete_poll_answer_translation(translation.id) + deleted_translation = PollAnswerTranslation.query.get(translation.id) + + assert deleted_translation is None diff --git a/met-api/tests/unit/models/test_subscribe_item_translation.py 
b/met-api/tests/unit/models/test_subscribe_item_translation.py new file mode 100644 index 000000000..695795a09 --- /dev/null +++ b/met-api/tests/unit/models/test_subscribe_item_translation.py @@ -0,0 +1,77 @@ +"""Tests to verify the SubscribeItemTranslation Model.""" + +from met_api.models.subscribe_item_translation import SubscribeItemTranslation +from tests.utilities.factory_utils import ( + factory_subscribe_item_model_with_enagement, factory_subscribe_item_translation_model) + + +def test_create_subscribe_item_translation(session): + """Assert that a subscribe item translation can be created.""" + item, language = factory_subscribe_item_model_with_enagement() + translation_data = { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Description traduite', + } + + translation = SubscribeItemTranslation.create_sub_item_translation( + translation_data + ) + assert translation.id is not None + assert translation.description == 'Description traduite' + + +def test_get_subscribe_item_translation_by_item_and_language(session): + """Assert that translations for a subscribe item can be fetched by item and language.""" + item, language = factory_subscribe_item_model_with_enagement() + factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Translated Description', + } + ) + session.commit() + + translations = SubscribeItemTranslation.get_by_item_and_language( + item.id, language.id + ) + assert len(translations) == 1 + assert translations[0].description == 'Translated Description' + + +def test_update_subscribe_item_translation(session): + """Assert that a subscribe item translation can be updated.""" + item, language = factory_subscribe_item_model_with_enagement() + translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Translated Description', + } + ) + + updated_data = {'description': 'Descripción actualizada'} + SubscribeItemTranslation.update_sub_item_translation( + translation.id, updated_data + ) + updated_translation = SubscribeItemTranslation.query.get(translation.id) + + assert updated_translation.description == 'Descripción actualizada' + + +def test_delete_subscribe_item_translation(session): + """Assert that a subscribe item translation can be deleted.""" + item, language = factory_subscribe_item_model_with_enagement() + translation = factory_subscribe_item_translation_model( + { + 'subscribe_item_id': item.id, + 'language_id': language.id, + 'description': 'Translated Description', + } + ) + + SubscribeItemTranslation.delete_sub_item_translation(translation.id) + deleted_translation = SubscribeItemTranslation.query.get(translation.id) + + assert deleted_translation is None diff --git a/met-api/tests/unit/models/test_timeline_event_translation.py b/met-api/tests/unit/models/test_timeline_event_translation.py new file mode 100644 index 000000000..d0cde05cd --- /dev/null +++ b/met-api/tests/unit/models/test_timeline_event_translation.py @@ -0,0 +1,96 @@ +"""Tests for the TimelineEventTranslation model. + +Test suite to ensure that the TimelineEventTranslation model routines are working as expected. 
+""" + +from met_api.models.timeline_event_translation import TimelineEventTranslation +from tests.utilities.factory_scenarios import TestTimelineEventTranslationInfo +from tests.utilities.factory_utils import factory_timeline_event_translation_model, timeline_event_model_with_language + + +def test_get_by_event_and_language(session): + """Assert translations for a timeline event can be fetched by event and language.""" + timeline_event, _, language = timeline_event_model_with_language() + timeline_event_translation_data = { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + + factory_timeline_event_translation_model(timeline_event_translation_data) + session.commit() + + translations = TimelineEventTranslation.get_by_event_and_language( + timeline_event.id, language.id + ) + assert len(translations) == 1 + assert ( + translations[0].description == + TestTimelineEventTranslationInfo.timeline_event_info1.value[ + 'description' + ] + ) + + +def test_create_timeline_event_translation(session): + """Assert that a timeline event translation can be created.""" + timeline_event, _, language = timeline_event_model_with_language() + timeline_event_translation_data = { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + + translation = TimelineEventTranslation.create_timeline_event_translation( + timeline_event_translation_data + ) + assert translation.id is not None + assert ( + translation.description == + TestTimelineEventTranslationInfo.timeline_event_info1.value[ + 'description' + ] + ) + + +def test_update_timeline_event_translation(session): + """Assert that a timeline event translation can be updated.""" + timeline_event, _, language = timeline_event_model_with_language() + timeline_event_translation = factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + ) + + updated_data = {'description': 'Updated Description'} + TimelineEventTranslation.update_timeline_event_translation( + timeline_event_translation.id, updated_data + ) + updated_translation = TimelineEventTranslation.query.get( + timeline_event_translation.id + ) + + assert updated_translation.description == 'Updated Description' + + +def test_delete_timeline_event_translation(session): + """Assert that a timeline event translation can be deleted.""" + timeline_event, _, language = timeline_event_model_with_language() + timeline_event_translation = factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + ) + + TimelineEventTranslation.delete_timeline_event_translation( + timeline_event_translation.id + ) + deleted_translation = TimelineEventTranslation.query.get( + timeline_event_translation.id + ) + + assert deleted_translation is None diff --git a/met-api/tests/unit/services/test_event_item_translation_service.py b/met-api/tests/unit/services/test_event_item_translation_service.py new file mode 100644 index 000000000..7714267e2 --- /dev/null +++ b/met-api/tests/unit/services/test_event_item_translation_service.py @@ -0,0 +1,144 @@ +"""Test to verify the SubscribeItemTranslation Service.""" +from met_api.services.event_item_translation_service import EventItemTranslationService +from 
tests.utilities.factory_scenarios import TestEventItemTranslationInfo, TestJwtClaims +from tests.utilities.factory_utils import ( + event_item_model_with_language, factory_event_item_translation_model, factory_staff_user_model, patch_token_info) + + +def test_get_event_item_translation_by_id(session): + """Assert that event item translation can be fetched by its ID.""" + event_item, _, language = event_item_model_with_language() + translation = factory_event_item_translation_model( + { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + ) + session.commit() + + fetched_translation = EventItemTranslationService.get_by_id( + translation.id + ) + assert fetched_translation is not None + assert fetched_translation.id == translation.id + + +def test_get_event_item_translation(session): + """Assert that event item translations can be fetched by item and language.""" + event_item, _, language = event_item_model_with_language() + factory_event_item_translation_model( + { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + ) + session.commit() + + translations = EventItemTranslationService.get_event_item_translation( + event_item.id, language.id + ) + assert len(translations) == 1 + assert translations[0].description == TestEventItemTranslationInfo.event_item_info1.value['description'] + + +def test_create_event_item_translation_with_authorization( + session, monkeypatch +): + """Assert that an event item translation can be created with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + event_item, event, language = event_item_model_with_language() + data = { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + 'description': 'New Translation', + } + + created_translation = ( + EventItemTranslationService.create_event_item_translation( + event.id, data, False + ) + ) + assert created_translation is not None + assert created_translation.description == data['description'] + + +def test_create_event_item_translation_with_authorization_pre_populate( + session, monkeypatch +): + """Assert that an event item translation can be created with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + event_item, event, language = event_item_model_with_language() + data = { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + + # Setting pre_populate to true should return the event item description + created_translation = ( + EventItemTranslationService.create_event_item_translation( + event.id, data, True + ) + ) + assert created_translation is not None + assert created_translation.description == event_item.description + + +def test_update_event_item_translation_with_authorization( + session, monkeypatch +): + """Assert that an event item translation can be updated with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + event_item, event, language = event_item_model_with_language() + translation = 
factory_event_item_translation_model( + { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + 'description': 'Old Description', + } + ) + session.commit() + + updated_data = {'description': 'Updated Description'} + updated_translation = ( + EventItemTranslationService.update_event_item_translation( + event.id, translation.id, updated_data + ) + ) + assert updated_translation.description == updated_data['description'] + + +def test_delete_event_item_translation_with_authorization( + session, monkeypatch +): + """Assert that an event item translation can be deleted with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + event_item, event, language = event_item_model_with_language() + translation = factory_event_item_translation_model( + { + ** TestEventItemTranslationInfo.event_item_info1.value, + 'event_item_id': event_item.id, + 'language_id': language.id, + } + ) + session.commit() + + EventItemTranslationService.delete_event_item_translation( + event.id, translation.id + ) + deleted_translation = EventItemTranslationService.get_by_id( + translation.id + ) + assert deleted_translation is None diff --git a/met-api/tests/unit/services/test_poll_answer_translation_service.py b/met-api/tests/unit/services/test_poll_answer_translation_service.py new file mode 100644 index 000000000..fa9155d84 --- /dev/null +++ b/met-api/tests/unit/services/test_poll_answer_translation_service.py @@ -0,0 +1,147 @@ +"""Tests for the PollAnswerTranslationService. + +Test suite to ensure that the PollAnswerTranslationService routines are working as expected. +""" + +from met_api.services.poll_answer_translation_service import PollAnswerTranslationService +from tests.utilities.factory_scenarios import TestJwtClaims +from tests.utilities.factory_utils import ( + factory_poll_answer_translation_model, factory_staff_user_model, patch_token_info, + poll_answer_model_with_poll_enagement) + + +def test_get_poll_answer_translation_by_id(session): + """Assert that poll answer translation can be fetched by its ID.""" + answer, _, language = poll_answer_model_with_poll_enagement() + translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Test Translation', + } + ) + session.commit() + + fetched_translation = PollAnswerTranslationService.get_by_id( + translation.id + ) + assert fetched_translation is not None + assert fetched_translation.id == translation.id + + +def test_get_poll_answer_translation(session): + """Assert that poll answer translations can be fetched by answer and language.""" + answer, _, language = poll_answer_model_with_poll_enagement() + factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Test Translation', + } + ) + session.commit() + + translations = PollAnswerTranslationService.get_poll_answer_translation( + answer.id, language.id + ) + assert len(translations) == 1 + assert translations[0].answer_text == 'Test Translation' + + +def test_create_poll_answer_translation_with_authorization( + session, monkeypatch +): + """Assert that a poll answer translation can be created with proper authorization.""" + # Mock the authorization check or provide necessary setup + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + 
factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + answer, poll, language = poll_answer_model_with_poll_enagement() + data = { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'New Translated Answer', + } + + created_translation = ( + PollAnswerTranslationService.create_poll_answer_translation( + poll.id, data, False + ) + ) + assert created_translation is not None + assert created_translation.answer_text == 'New Translated Answer' + + +def test_create_poll_answer_translation_with_authorization_with_prepopulate( + session, monkeypatch +): + """Assert that a poll answer translation can be created with proper authorization.""" + # Mock the authorization check or provide necessary setup + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + answer, poll, language = poll_answer_model_with_poll_enagement() + + data = { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'pre_populate': True, + } + + created_translation = ( + PollAnswerTranslationService.create_poll_answer_translation( + poll.id, data, data['pre_populate'] + ) + ) + assert created_translation is not None + assert created_translation.answer_text == answer.answer_text + + +def test_update_poll_answer_translation_with_authorization( + session, monkeypatch +): + """Assert that a poll answer translation can be updated with proper authorization.""" + # Mock the authorization check or provide necessary setup + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + answer, poll, language = poll_answer_model_with_poll_enagement() + translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Old Translation', + } + ) + session.commit() + + updated_data = {'answer_text': 'Updated Translation'} + updated_translation = ( + PollAnswerTranslationService.update_poll_answer_translation( + poll.id, translation.id, updated_data + ) + ) + assert updated_translation.answer_text == 'Updated Translation' + + +def test_delete_poll_answer_translation_with_authorization( + session, monkeypatch +): + """Assert that a poll answer translation can be deleted with proper authorization.""" + # Mock the authorization check or provide necessary setup + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + answer, poll, language = poll_answer_model_with_poll_enagement() + translation = factory_poll_answer_translation_model( + { + 'poll_answer_id': answer.id, + 'language_id': language.id, + 'answer_text': 'Translation to Delete', + } + ) + session.commit() + + PollAnswerTranslationService.delete_poll_answer_translation( + poll.id, translation.id + ) + deleted_translation = PollAnswerTranslationService.get_by_id( + translation.id + ) + assert deleted_translation is None diff --git a/met-api/tests/unit/services/test_poll_answers_service.py b/met-api/tests/unit/services/test_poll_answers_service.py index db43c82c8..d1a0e4a3d 100644 --- a/met-api/tests/unit/services/test_poll_answers_service.py +++ b/met-api/tests/unit/services/test_poll_answers_service.py @@ -37,6 +37,21 @@ def test_get_poll_answer(session): assert len(poll_answers) > 0 +def test_get_poll_answer_by_id(session): + """Test getting a poll answer by id.""" + # Create a poll answer + widget = 
_create_widget() + poll = factory_poll_model(widget, TestWidgetPollInfo.poll1) + answer = factory_poll_answer_model(poll, TestPollAnswerInfo.answer1) + session.commit() + + # Call function under test + result = PollAnswerService.get_poll_answer_by_id(answer.id) + + # Assert + assert result.id == answer.id + + def test_delete_poll_answers(session): """Assert that poll answers can be deleted for a given poll ID.""" widget = _create_widget() diff --git a/met-api/tests/unit/services/test_subscribe_item_translation_service.py b/met-api/tests/unit/services/test_subscribe_item_translation_service.py new file mode 100644 index 000000000..7d6b4818b --- /dev/null +++ b/met-api/tests/unit/services/test_subscribe_item_translation_service.py @@ -0,0 +1,144 @@ +"""Unit tests for SubscribeItemTranslationService.""" + +from met_api.services.subscribe_item_translation_service import SubscribeItemTranslationService +from tests.utilities.factory_scenarios import TestJwtClaims, TestSubscribeItemTranslationInfo +from tests.utilities.factory_utils import ( + factory_staff_user_model, factory_subscribe_item_translation_model, patch_token_info, + subscribe_item_model_with_language) + + +def test_get_subscribe_item_translation_by_id(session): + """Assert that subscribe item translation can be fetched by its ID.""" + subscribe_item, _, language = subscribe_item_model_with_language() + translation = factory_subscribe_item_translation_model( + { + **TestSubscribeItemTranslationInfo.translate_info1.value, + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + } + ) + session.commit() + + fetched_translation = SubscribeItemTranslationService.get_by_id( + translation.id + ) + assert fetched_translation is not None + assert fetched_translation.id == translation.id + + +def test_get_subscribe_item_translation(session): + """Assert that subscribe item translations can be fetched by item and language.""" + subscribe_item, _, language = subscribe_item_model_with_language() + factory_subscribe_item_translation_model( + { + **TestSubscribeItemTranslationInfo.translate_info1.value, + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + } + ) + session.commit() + + translations = SubscribeItemTranslationService.get_subscribe_item_translation( + subscribe_item.id, language.id + ) + assert len(translations) == 1 + assert translations[0].description == TestSubscribeItemTranslationInfo.translate_info1.value['description'] + + +def test_create_subscribe_item_translation_with_authorization( + session, monkeypatch +): + """Assert that a subscribe item translation can be created with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + subscribe_item, widget_subscribe, language = subscribe_item_model_with_language() + data = { + **TestSubscribeItemTranslationInfo.translate_info1.value, + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + 'description': 'New Translation', + } + + created_translation = ( + SubscribeItemTranslationService.create_subscribe_item_translation( + widget_subscribe.id, data, False + ) + ) + assert created_translation is not None + assert created_translation.description == data['description'] + + +def test_create_subscribe_item_translation_with_authorization_with_prepopulate( + session, monkeypatch +): + """Assert that a subscribe item translation can be created with proper authorization.""" + # Mock authorization + 
patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + subscribe_item, widget_subscribe, language = subscribe_item_model_with_language() + data = { + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + } + + created_translation = ( + SubscribeItemTranslationService.create_subscribe_item_translation( + widget_subscribe.id, data, True + ) + ) + assert created_translation is not None + assert created_translation.description == subscribe_item.description + + +def test_update_subscribe_item_translation_with_authorization( + session, monkeypatch +): + """Assert that a subscribe item translation can be updated with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + subscribe_item, widget_subscribe, language = subscribe_item_model_with_language() + translation = factory_subscribe_item_translation_model( + { + **TestSubscribeItemTranslationInfo.translate_info1.value, + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + 'description': 'Old Description', + } + ) + session.commit() + + updated_data = {'description': 'Updated Description'} + updated_translation = ( + SubscribeItemTranslationService.update_subscribe_item_translation( + widget_subscribe.id, translation.id, updated_data + ) + ) + assert updated_translation.description == updated_data['description'] + + +def test_delete_subscribe_item_translation_with_authorization( + session, monkeypatch +): + """Assert that a subscribe item translation can be deleted with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + subscribe_item, widget_subscribe, language = subscribe_item_model_with_language() + translation = factory_subscribe_item_translation_model( + { + **TestSubscribeItemTranslationInfo.translate_info1.value, + 'subscribe_item_id': subscribe_item.id, + 'language_id': language.id, + } + ) + session.commit() + + SubscribeItemTranslationService.delete_subscribe_item_translation( + widget_subscribe.id, translation.id + ) + deleted_translation = SubscribeItemTranslationService.get_by_id( + translation.id + ) + assert deleted_translation is None diff --git a/met-api/tests/unit/services/test_timeline_event_translation_service.py b/met-api/tests/unit/services/test_timeline_event_translation_service.py new file mode 100644 index 000000000..aa532cdeb --- /dev/null +++ b/met-api/tests/unit/services/test_timeline_event_translation_service.py @@ -0,0 +1,144 @@ +"""Unit tests for TimelineEventTranslationService.""" + +from met_api.services.timeline_event_translation_service import TimelineEventTranslationService +from tests.utilities.factory_scenarios import TestJwtClaims, TestTimelineEventTranslationInfo +from tests.utilities.factory_utils import ( + factory_staff_user_model, factory_timeline_event_translation_model, patch_token_info, + timeline_event_model_with_language) + + +def test_get_timeline_event_translation_by_id(session): + """Assert that a timeline event translation can be fetched by its ID.""" + timeline_event, _, language = timeline_event_model_with_language() + translation = factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, 
'language_id': language.id, + } + ) + session.commit() + + fetched_translation = TimelineEventTranslationService.get_by_id( + translation.id + ) + assert fetched_translation is not None + assert fetched_translation.id == translation.id + + +def test_get_timeline_event_translation_by_language(session): + """Assert that timeline event translations can be fetched by event and language.""" + timeline_event, _, language = timeline_event_model_with_language() + factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + ) + session.commit() + + translations = TimelineEventTranslationService.get_timeline_event_translation( + timeline_event.id, language.id + ) + assert len(translations) == 1 + assert translations[0].description == TestTimelineEventTranslationInfo.timeline_event_info1.value['description'] + + +def test_create_timeline_event_translation_with_authorization( + session, monkeypatch +): + """Assert that a timeline event translation can be created with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + timeline_event, widget_timeline, language = timeline_event_model_with_language() + data = { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'New Translation', + } + + created_translation = ( + TimelineEventTranslationService.create_timeline_event_translation( + widget_timeline.id, data, False + ) + ) + assert created_translation is not None + assert created_translation.description == data['description'] + + +def test_create_timeline_event_translation_with_authorization_with_prepopulate( + session, monkeypatch +): + """Assert that a timeline event translation can be created with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + timeline_event, widget_timeline, language = timeline_event_model_with_language() + data = { + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + + created_translation = ( + TimelineEventTranslationService.create_timeline_event_translation( + widget_timeline.id, data, True + ) + ) + assert created_translation is not None + assert created_translation.description == timeline_event.description + + +def test_update_timeline_event_translation_with_authorization( + session, monkeypatch +): + """Assert that a timeline event translation can be updated with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + timeline_event, widget_timeline, language = timeline_event_model_with_language() + translation = factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + 'description': 'Old Description', + } + ) + session.commit() + + updated_data = {'description': 'Updated Description'} + updated_translation = ( + TimelineEventTranslationService.update_timeline_event_translation( + widget_timeline.id, translation.id, updated_data + ) + ) + assert updated_translation.description == 
updated_data['description'] + + +def test_delete_timeline_event_translation_with_authorization( + session, monkeypatch +): + """Assert that a timeline event translation can be deleted with proper authorization.""" + # Mock authorization + patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) + factory_staff_user_model(external_id=TestJwtClaims.staff_admin_role['sub']) + timeline_event, widget_timeline, language = timeline_event_model_with_language() + translation = factory_timeline_event_translation_model( + { + **TestTimelineEventTranslationInfo.timeline_event_info1.value, + 'timeline_event_id': timeline_event.id, + 'language_id': language.id, + } + ) + session.commit() + + TimelineEventTranslationService.delete_timeline_event_translation( + widget_timeline.id, translation.id + ) + deleted_translation = TimelineEventTranslationService.get_by_id( + translation.id + ) + assert deleted_translation is None diff --git a/met-api/tests/utilities/factory_scenarios.py b/met-api/tests/utilities/factory_scenarios.py index 2349521c9..37508402f 100644 --- a/met-api/tests/utilities/factory_scenarios.py +++ b/met-api/tests/utilities/factory_scenarios.py @@ -926,6 +926,51 @@ class TestSurveyTranslationInfo(dict, Enum): } + +class TestPollAnswerTranslationInfo(dict, Enum): + """Test scenarios of Poll Answer Translation.""" + + translation1 = { + 'poll_answer_id': 1, + 'language_id': 2, + 'answer_text': 'Answer 1' + } + + +class TestSubscribeItemTranslationInfo(dict, Enum): + """Test scenarios of Subscribe Item Translation.""" + + translate_info1 = { + 'subscribe_item_id': 1, + 'language_id': 2, + 'description': fake.text(), + } + + +class TestEventItemTranslationInfo(dict, Enum): + """Test scenarios of Event Item Translation.""" + + event_item_info1 = { + 'event_item_id': 1, + 'language_id': 2, + 'description': fake.text(), + 'location_name': 'Location name', + 'location_address': 'location address', + 'url': fake.url(), + 'url_label': fake.name(), + } + + +class TestTimelineEventTranslationInfo(dict, Enum): + """Test scenarios of Timeline Event Translation.""" + + timeline_event_info1 = { + 'timeline_event_id': 1, + 'language_id': 2, + 'description': fake.text(), + 'time': datetime.now().strftime('%Y-%m-%d'), + } + + class TestEngagementTranslationInfo(dict, Enum): """Test scenarios of engagement translation content.""" diff --git a/met-api/tests/utilities/factory_utils.py b/met-api/tests/utilities/factory_utils.py index d2c3b43cf..522dd6f22 100644 --- a/met-api/tests/utilities/factory_utils.py +++ b/met-api/tests/utilities/factory_utils.py @@ -15,6 +15,7 @@ Test Utility for creating model factory. 
""" + from typing import Optional from faker import Faker @@ -22,6 +23,7 @@ from met_api.auth import Auth from met_api.config import get_named_config +from met_api.constants.email_verification import EmailVerificationType from met_api.constants.engagement_status import Status from met_api.constants.widget import WidgetType from met_api.models import Tenant @@ -32,36 +34,45 @@ from met_api.models.engagement_settings import EngagementSettingsModel from met_api.models.engagement_slug import EngagementSlug as EngagementSlugModel from met_api.models.engagement_translation import EngagementTranslation as EngagementTranslationModel +from met_api.models.event_item import EventItem as EventItemModel +from met_api.models.event_item_translation import EventItemTranslation as EventItemTranslationModel from met_api.models.feedback import Feedback as FeedbackModel from met_api.models.language import Language as LanguageModel from met_api.models.membership import Membership as MembershipModel from met_api.models.participant import Participant as ParticipantModel +from met_api.models.poll_answer_translation import PollAnswerTranslation as PollAnswerTranslationModel from met_api.models.poll_answers import PollAnswer as PollAnswerModel from met_api.models.poll_responses import PollResponse as PollResponseModel from met_api.models.report_setting import ReportSetting as ReportSettingModel from met_api.models.staff_user import StaffUser as StaffUserModel from met_api.models.submission import Submission as SubmissionModel +from met_api.models.subscribe_item import SubscribeItem as SubscribeItemModel +from met_api.models.subscribe_item_translation import SubscribeItemTranslation as SubscribeItemTranslationModel from met_api.models.subscription import Subscription as SubscriptionModel from met_api.models.survey import Survey as SurveyModel from met_api.models.survey_translation import SurveyTranslation as SurveyTranslationModel from met_api.models.timeline_event import TimelineEvent as TimelineEventModel +from met_api.models.timeline_event_translation import TimelineEventTranslation as TimelineEventTranslationModel from met_api.models.widget import Widget as WidgetModal from met_api.models.widget_documents import WidgetDocuments as WidgetDocumentModel +from met_api.models.widget_events import WidgetEvents as WidgetEventsModel from met_api.models.widget_item import WidgetItem as WidgetItemModal from met_api.models.widget_map import WidgetMap as WidgetMapModel -from met_api.models.widget_translation import WidgetTranslation as WidgetTranslationModel from met_api.models.widget_poll import Poll as WidgetPollModel from met_api.models.widget_timeline import WidgetTimeline as WidgetTimelineModel +from met_api.models.widget_translation import WidgetTranslation as WidgetTranslationModel from met_api.models.widget_video import WidgetVideo as WidgetVideoModel +from met_api.models.widgets_subscribe import WidgetSubscribe as WidgetSubscribeModel from met_api.utils.constants import TENANT_ID_HEADER from met_api.utils.enums import MembershipStatus -from met_api.constants.email_verification import EmailVerificationType from tests.utilities.factory_scenarios import ( TestCommentInfo, TestEngagementInfo, TestEngagementMetadataInfo, TestEngagementMetadataTaxonInfo, - TestEngagementSlugInfo, TestEngagementTranslationInfo, TestFeedbackInfo, TestJwtClaims, TestLanguageInfo, - TestParticipantInfo, TestPollAnswerInfo, TestPollResponseInfo, TestReportSettingInfo, TestSubmissionInfo, - TestSurveyInfo, TestSurveyTranslationInfo, 
TestTenantInfo, TestTimelineInfo, TestUserInfo, TestWidgetDocumentInfo, - TestWidgetInfo, TestWidgetItemInfo, TestWidgetMap, TestWidgetPollInfo, TestWidgetTranslationInfo, TestWidgetVideo) + TestEngagementSlugInfo, TestEngagementTranslationInfo, TestEventItemTranslationInfo, TestEventnfo, TestFeedbackInfo, + TestJwtClaims, TestLanguageInfo, TestParticipantInfo, TestPollAnswerInfo, TestPollAnswerTranslationInfo, + TestPollResponseInfo, TestReportSettingInfo, TestSubmissionInfo, TestSubscribeInfo, + TestSubscribeItemTranslationInfo, TestSurveyInfo, TestSurveyTranslationInfo, TestTenantInfo, + TestTimelineEventTranslationInfo, TestTimelineInfo, TestUserInfo, TestWidgetDocumentInfo, TestWidgetInfo, + TestWidgetItemInfo, TestWidgetMap, TestWidgetPollInfo, TestWidgetTranslationInfo, TestWidgetVideo) fake = Faker() @@ -145,9 +156,7 @@ def factory_email_verification(survey_id, type=None): return email_verification -def factory_engagement_model( - eng_info: dict = TestEngagementInfo.engagement1, name=None, status=None -): +def factory_engagement_model(eng_info: dict = TestEngagementInfo.engagement1, name=None, status=None): """Produce a engagement model.""" engagement = EngagementModel( name=name if name else fake.name(), @@ -198,9 +207,7 @@ def factory_metadata_requirements(auth: Optional[Auth] = None): """Create a tenant, an associated staff user, and engagement, for tests.""" tenant = factory_tenant_model() tenant.short_name = fake.lexify(text='????').upper() - (engagement_info := TestEngagementInfo.engagement1.copy())[ - 'tenant_id' - ] = tenant.id + (engagement_info := TestEngagementInfo.engagement1.copy())['tenant_id'] = tenant.id engagement = factory_engagement_model(engagement_info) (staff_info := TestUserInfo.user_staff_1.copy())['tenant_id'] = tenant.id factory_staff_user_model(TestJwtClaims.staff_admin_role['sub'], staff_info) @@ -220,9 +227,7 @@ def factory_taxon_requirements(auth: Optional[Auth] = None): tenant = factory_tenant_model() tenant.short_name = fake.lexify(text='????').upper() (staff_info := TestUserInfo.user_staff_1.copy())['tenant_id'] = tenant.id - factory_staff_user_model( - TestJwtClaims.staff_admin_role.get('sub'), staff_info - ) + factory_staff_user_model(TestJwtClaims.staff_admin_role.get('sub'), staff_info) if auth: headers = factory_auth_header( auth, @@ -251,9 +256,7 @@ def factory_metadata_taxon_model( return taxon -def factory_staff_user_model( - external_id=None, user_info: dict = TestUserInfo.user_staff_1 -): +def factory_staff_user_model(external_id=None, user_info: dict = TestUserInfo.user_staff_1): """Produce a staff user model.""" # Generate a external id if not passed external_id = external_id or fake.uuid4() @@ -275,9 +278,7 @@ def factory_participant_model( ): """Produce a participant model.""" participant = ParticipantModel( - email_address=ParticipantModel.encode_email( - participant['email_address'] - ), + email_address=ParticipantModel.encode_email(participant['email_address']), ) participant.save() return participant @@ -304,9 +305,7 @@ def factory_membership_model( return membership -def factory_feedback_model( - feedback_info: dict = TestFeedbackInfo.feedback1, status=None -): +def factory_feedback_model(feedback_info: dict = TestFeedbackInfo.feedback1, status=None): """Produce a feedback model.""" feedback = FeedbackModel( status=feedback_info.get('status'), @@ -323,9 +322,7 @@ def factory_auth_header(jwt, claims, tenant_id=None): """Produce JWT tokens for use in tests.""" return { 'Authorization': 'Bearer ' + jwt.create_jwt(claims=claims, 
header=JWT_HEADER), - TENANT_ID_HEADER: ( - tenant_id or current_app.config.get('DEFAULT_TENANT_SHORT_NAME') - ), + TENANT_ID_HEADER: (tenant_id or current_app.config.get('DEFAULT_TENANT_SHORT_NAME')), } @@ -383,9 +380,7 @@ def factory_submission_model( return submission -def factory_comment_model( - survey_id, submission_id, comment_info: dict = TestCommentInfo.comment1 -): +def factory_comment_model(survey_id, submission_id, comment_info: dict = TestCommentInfo.comment1): """Produce a comment model.""" comment = CommentModel( survey_id=survey_id, @@ -421,9 +416,7 @@ def token_info(): """Return token info.""" return claims - monkeypatch.setattr( - 'met_api.utils.user_context._get_token_info', token_info - ) + monkeypatch.setattr('met_api.utils.user_context._get_token_info', token_info) # Add a database user that matches the token # factory_staff_user_model(external_id=claims.get('sub')) @@ -481,20 +474,14 @@ def factory_poll_model(widget, poll_info: dict = TestWidgetPollInfo.poll1): return poll -def factory_poll_answer_model( - poll, answer_info: dict = TestPollAnswerInfo.answer1 -): +def factory_poll_answer_model(poll, answer_info: dict = TestPollAnswerInfo.answer1): """Produce a Poll model.""" - answer = PollAnswerModel( - answer_text=answer_info.get('answer_text'), poll_id=poll.id - ) + answer = PollAnswerModel(answer_text=answer_info.get('answer_text'), poll_id=poll.id) answer.save() return answer -def factory_poll_response_model( - poll, answer, response_info: dict = TestPollResponseInfo.response1 -): +def factory_poll_response_model(poll, answer, response_info: dict = TestPollResponseInfo.response1): """Produce a Poll model.""" response = PollResponseModel( participant_id=response_info.get('participant_id'), @@ -573,7 +560,9 @@ def factory_language_model(lang_info: dict = TestLanguageInfo.language1): return language_model -def factory_widget_translation_model(widget_translation: dict = TestWidgetTranslationInfo.widgettranslation1): +def factory_widget_translation_model( + widget_translation: dict = TestWidgetTranslationInfo.widgettranslation1, +): """Produce a widget translation model.""" widget_translation = WidgetTranslationModel( widget_id=widget_translation.get('widget_id'), @@ -608,15 +597,222 @@ def factory_survey_translation_and_engagement_model(): survey_id=survey.id, language_id=lang.id, name=TestSurveyTranslationInfo.survey_translation1.get('name'), - form_json=TestSurveyTranslationInfo.survey_translation1.get( - 'form_json'), + form_json=TestSurveyTranslationInfo.survey_translation1.get('form_json'), ) translation.save() return translation, survey, lang +def factory_poll_answer_translation_model( + translate_info: dict = TestPollAnswerTranslationInfo.translation1, +): + """Produce a translation model.""" + translation = PollAnswerTranslationModel( + poll_answer_id=translate_info.get('poll_answer_id'), + language_id=translate_info.get('language_id'), + answer_text=translate_info.get('answer_text'), + ) + translation.save() + return translation + + +def poll_answer_model_with_poll_enagement(): + """Produce a poll answer model along with related engagement and poll models.""" + engagement = factory_engagement_model() + widget_model = factory_widget_model({'engagement_id': engagement.id}) + poll_model = factory_poll_model(widget_model) + answer = factory_poll_answer_model(poll_model) + language = factory_language_model({'code': 'en', 'name': 'English'}) + return answer, poll_model, language + + +def factory_widget_subscribe_model(widget_model=None): + """Produce a 
WidgetSubscribe model instance."""
+    engagement = factory_engagement_model()
+
+    TestWidgetInfo.widget1['engagement_id'] = engagement.id
+    if widget_model is None:
+        widget_model = factory_widget_model(TestWidgetInfo.widget1)
+
+    subscribe_info = {
+        **TestSubscribeInfo.subscribe_info_1.value,
+    }
+
+    widget_subcribe_model = WidgetSubscribeModel(
+        widget_id=widget_model.id,
+        type=subscribe_info.get('type'),
+        sort_index=subscribe_info.get('sort_index'),
+    )
+
+    widget_subcribe_model.save()
+    return widget_subcribe_model
+
+
+def factory_subscribe_item_model(widget_subscribe=None, subscribe_item_info: dict = None):
+    """Produce a SubscribeItem model instance."""
+    if subscribe_item_info is None:
+        subscribe_item_info = TestSubscribeInfo.subscribe_info_1.value['items'][0]
+
+    if widget_subscribe is None:
+        widget_subscribe = factory_widget_subscribe_model()
+
+    subscribe_item = SubscribeItemModel(
+        description=subscribe_item_info.get('description', ''),
+        rich_description=subscribe_item_info.get('rich_description', ''),
+        call_to_action_text=subscribe_item_info.get('call_to_action_text', ''),
+        call_to_action_type=subscribe_item_info.get('call_to_action_type', ''),
+        sort_index=subscribe_item_info.get('sort_index', 1),
+        widget_subscribe_id=widget_subscribe.id,
+    )
+    subscribe_item.save()
+    return subscribe_item
+
+
+def factory_subscribe_item_translation_model(
+    translate_info: dict = TestSubscribeItemTranslationInfo.translate_info1,
+):
+    """Produce a translation model for Subscribe items."""
+    translation = SubscribeItemTranslationModel(
+        subscribe_item_id=translate_info.get('subscribe_item_id'),
+        language_id=translate_info.get('language_id'),
+        description=translate_info.get('description'),
+    )
+    translation.save()
+    return translation
+
+
+def factory_subscribe_item_model_with_enagement():
+    """Produce a SubscribeItem model instance with engagement."""
+    engagement = factory_engagement_model()
+    widget_model = factory_widget_model({'engagement_id': engagement.id})
+    widget_subscribe = factory_widget_subscribe_model(widget_model)
+    subscribe_item_model = factory_subscribe_item_model(widget_subscribe)
+    language_model = factory_language_model({'code': 'en', 'name': 'English'})
+    return subscribe_item_model, language_model
+
+
+def factory_widget_event_model(widget_model=None):
+    """Produce a Widget Event model instance."""
+    engagement = factory_engagement_model()
+
+    TestWidgetInfo.widget1['engagement_id'] = engagement.id
+    if widget_model is None:
+        widget_model = factory_widget_model(TestWidgetInfo.widget1)
+
+    event_info = {
+        **TestEventnfo.event_meetup.value,
+    }
+
+    widget_event_model = WidgetEventsModel(
+        widget_id=widget_model.id,
+        type=event_info.get('type'),
+        sort_index=event_info.get('sort_index'),
+        title=event_info.get('title'),
+    )
+
+    widget_event_model.save()
+    return widget_event_model
+
+
+def factory_event_item_model(widget_event=None, event_item_info: dict = None):
+    """Produce an EventItem model instance."""
+    if event_item_info is None:
+        event_item_info = TestEventnfo.event_meetup.value['items'][0]
+
+    if widget_event is None:
+        widget_event = factory_widget_event_model()
+
+    event_item = EventItemModel(
+        description=event_item_info.get('description', ''),
+        location_name=event_item_info.get('location_name', ''),
+        location_address=event_item_info.get('location_address', ''),
+        start_date=event_item_info.get('start_date', ''),
+        end_date=event_item_info.get('end_date', ''),
+        url=event_item_info.get('url', ''),
+
url_label=event_item_info.get('url_label', ''), + sort_index=event_item_info.get('sort_index', 1), + widget_events_id=widget_event.id, + ) + event_item.save() + return event_item + + +def event_item_model_with_language(): + """Produce an event item model instance with language.""" + engagement = factory_engagement_model() + widget_model = factory_widget_model({'engagement_id': engagement.id}) + widget_event = factory_widget_event_model(widget_model) + event_item_model = factory_event_item_model(widget_event) + language_model = factory_language_model({'code': 'en', 'name': 'English'}) + return event_item_model, widget_event, language_model + + +def factory_event_item_translation_model( + event_translation_info: dict = TestEventItemTranslationInfo.event_item_info1, +): + """Produce a translation model for Event items.""" + event_translation = EventItemTranslationModel( + event_item_id=event_translation_info.get('event_item_id'), + language_id=event_translation_info.get('language_id'), + description=event_translation_info.get('description'), + location_name=event_translation_info.get('location_name'), + location_address=event_translation_info.get('location_address'), + url=event_translation_info.get('url'), + url_label=event_translation_info.get('url_label'), + ) + event_translation.save() + return event_translation + + +def timeline_event_model_with_language(): + """Produce a timeline event model instance with language.""" + engagement = factory_engagement_model() + widget_model = factory_widget_model({'engagement_id': engagement.id}) + widget_timeline = factory_widget_timeline_model( + { + **TestTimelineInfo.widget_timeline.value, + 'widget_id': widget_model.id, + 'engagement_id': engagement.id, + } + ) + timeline_event = factory_timeline_event_model( + { + **TestTimelineInfo.timeline_event.value, + 'timeline_id': widget_timeline.id, + 'widget_id': widget_model.id, + 'engagement_id': engagement.id, + } + ) + language_model = factory_language_model({'code': 'en', 'name': 'English'}) + return timeline_event, widget_timeline, language_model + + +def factory_timeline_event_translation_model( + timeline_translation_info: dict = TestTimelineEventTranslationInfo.timeline_event_info1, +): + """Produce a translation model for Timeline items.""" + timeline_translation = TimelineEventTranslationModel( + timeline_event_id=timeline_translation_info.get('timeline_event_id'), + language_id=timeline_translation_info.get('language_id'), + time=timeline_translation_info.get('time'), + description=timeline_translation_info.get('description'), + ) + timeline_translation.save() + return timeline_translation + + +def subscribe_item_model_with_language(): + """Produce a subscribe item model instance with language.""" + engagement = factory_engagement_model() + widget_model = factory_widget_model({'engagement_id': engagement.id}) + widget_subscribe = factory_widget_subscribe_model(widget_model) + subscribe_item_model = factory_subscribe_item_model(widget_subscribe) + language_model = factory_language_model({'code': 'en', 'name': 'English'}) + return subscribe_item_model, widget_subscribe, language_model + + def factory_engagement_translation_model( - engagement_translation: dict = TestEngagementTranslationInfo.engagementtranslation1, + engagement_translation: dict = TestEngagementTranslationInfo.engagementtranslation1, ): """Produce a engagement translation model.""" engagement_translation = EngagementTranslationModel( From 30588918979e39ccf6bd07e87262b0a537a228c0 Mon Sep 17 00:00:00 2001 From: Baelx 
<16845197+Baelx@users.noreply.github.com> Date: Fri, 15 Mar 2024 13:26:12 -0700 Subject: [PATCH 21/42] DESENG-512 Add results tab to engagements (#2419) Co-authored-by: Alex --- CHANGELOG.MD | 3 + .../form/EngagementFormTabs/FormTabs.tsx | 5 ++ .../Results/EngagementResults.tsx | 65 +++++++++++++++++++ .../form/EngagementFormTabs/constants.ts | 3 +- 4 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 met-web/src/components/engagement/form/EngagementFormTabs/Results/EngagementResults.tsx diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 6006949be..741973b95 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,3 +1,4 @@ +<<<<<<< HEAD ## March 15, 2024 - **Task**: Multi-language - Create event, subcribe_item, poll, timeline widget translation tables & API routes [DESENG-515](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-515) @@ -6,6 +7,8 @@ - Added Subscribe Item translation API. - Added Event item translation API - Added Unit tests. +- **Task** Add "Results" page to engagements [DESENG-512](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-512) + ## March 08, 2024 diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/FormTabs.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/FormTabs.tsx index 48ac387f1..c94720e8a 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/FormTabs.tsx +++ b/met-web/src/components/engagement/form/EngagementFormTabs/FormTabs.tsx @@ -6,6 +6,7 @@ import { MetTab, MetTabList, MetTabPanel } from '../StyledTabComponents'; import { EngagementFormTabValues, ENGAGEMENT_FORM_TABS } from './constants'; import EngagementUserManagement from './UserManagement/EngagementUserManagement'; import EngagementSettingsForm from './Settings/EngagementSettingsForm'; +import EngagementResults from './Results/EngagementResults'; import { ActionContext } from '../ActionContext'; import { MetTooltip } from 'components/common'; import AdditionalTabContent from './AdditionalDetails/AdditionalTabContent'; @@ -50,6 +51,7 @@ const FormTabs = () => { {generateTab('Additional Details', ENGAGEMENT_FORM_TABS.ADDITIONAL)} {generateTab('User Management', ENGAGEMENT_FORM_TABS.USER_MANAGEMENT)} {generateTab('Settings', ENGAGEMENT_FORM_TABS.SETTINGS)} + {generateTab('Results', ENGAGEMENT_FORM_TABS.RESULTS)} @@ -64,6 +66,9 @@ const FormTabs = () => { + + + ); diff --git a/met-web/src/components/engagement/form/EngagementFormTabs/Results/EngagementResults.tsx b/met-web/src/components/engagement/form/EngagementFormTabs/Results/EngagementResults.tsx new file mode 100644 index 000000000..390a8512a --- /dev/null +++ b/met-web/src/components/engagement/form/EngagementFormTabs/Results/EngagementResults.tsx @@ -0,0 +1,65 @@ +import React, { useContext } from 'react'; +import { Grid, Box } from '@mui/material'; +import { MetPaper, PrimaryButton, SecondaryButton } from 'components/common'; +import { EngagementTabsContext } from '../EngagementTabsContext'; +import { ActionContext } from '../../ActionContext'; + +const EngagementResults = () => { + const { isSaving } = useContext(ActionContext); + const { handleSaveAndContinueEngagement, handleSaveAndExitEngagement, handlePreviewEngagement } = + useContext(EngagementTabsContext); + + return ( + + + + + handleSaveAndContinueEngagement()} + loading={isSaving} + > + Save and Continue + + handleSaveAndExitEngagement()} + loading={isSaving} + > + Save and Exit + + handlePreviewEngagement()} + disabled={isSaving} + > + {'Preview'} + + + + + + ); +}; + +export default EngagementResults; diff --git 
a/met-web/src/components/engagement/form/EngagementFormTabs/constants.ts b/met-web/src/components/engagement/form/EngagementFormTabs/constants.ts index 1cd6983d8..c33f8d763 100644 --- a/met-web/src/components/engagement/form/EngagementFormTabs/constants.ts +++ b/met-web/src/components/engagement/form/EngagementFormTabs/constants.ts @@ -1,10 +1,11 @@ -export type EngagementFormTabValues = 'content' | 'settings' | 'User Management' | 'additional'; +export type EngagementFormTabValues = 'content' | 'settings' | 'User Management' | 'additional' | 'results'; export const ENGAGEMENT_FORM_TABS: { [x: string]: EngagementFormTabValues } = { CONTENT: 'content', ADDITIONAL: 'additional', USER_MANAGEMENT: 'User Management', SETTINGS: 'settings', + RESULTS: 'results', }; export const ENGAGEMENT_UPLOADER_HEIGHT = '360px'; From 29ea71f01fa5ad562393128b61ba31fbd49dfb4f Mon Sep 17 00:00:00 2001 From: VineetBala-AOT <90332175+VineetBala-AOT@users.noreply.github.com> Date: Wed, 20 Mar 2024 10:57:52 -0700 Subject: [PATCH 22/42] [TO MAIN] Enable string translations for public view static text (#2423) * [To Feature] DESENG-467 Remaining public pages to fetch static text from the translation file. (#2420) * [To Feature] DESENG-467 Enable string translations for public view static text (#2417) --- CHANGELOG.MD | 9 +- met-web/sample.env | 3 + met-web/src/App.tsx | 67 ++++- met-web/src/components/FormCAC/FirstTab.tsx | 57 ++-- met-web/src/components/FormCAC/Form.tsx | 7 +- .../src/components/FormCAC/FormContext.tsx | 34 ++- met-web/src/components/FormCAC/SecondTab.tsx | 24 +- met-web/src/components/FormCAC/Tabs.tsx | 6 +- .../components/common/LanguageSelector.tsx | 104 +++++++ .../dashboard/comment/CommentTable.tsx | 8 +- .../dashboard/comment/CommentsBlock.tsx | 27 +- .../engagement/view/SurveyBlock.tsx | 13 +- .../widgets/Subscribe/FormSignUpSection.tsx | 3 +- .../ManageSubscription/Subscription.tsx | 16 +- .../feedback/FeedbackModal/index.tsx | 26 +- .../src/components/landing/EngagementTile.tsx | 13 +- .../components/landing/LandingComponent.tsx | 18 +- .../src/components/layout/Footer/index.tsx | 39 +-- .../components/layout/Header/PublicHeader.tsx | 21 +- .../components/publicDashboard/Dashboard.tsx | 34 ++- .../components/publicDashboard/ErrorBox.tsx | 7 +- .../publicDashboard/KPI/ProjectLocation.tsx | 6 +- .../publicDashboard/KPI/SurveyEmailsSent.tsx | 6 +- .../publicDashboard/KPI/SurveysCompleted.tsx | 6 +- .../src/components/publicDashboard/NoData.tsx | 5 +- .../SubmissionTrend/SubmissionTrend.tsx | 18 +- .../SurveyBar/QuestionBlock.tsx | 6 +- .../publicDashboard/SurveyBar/index.tsx | 22 +- .../SurveyBarPrintable/index.tsx | 6 +- .../components/survey/edit/ActionContext.tsx | 16 +- .../src/components/survey/edit/EditForm.tsx | 8 +- .../components/survey/edit/FormWrapped.tsx | 3 +- .../survey/edit/InvalidTokenModal.tsx | 15 +- .../survey/submit/ActionContext.tsx | 19 +- .../survey/submit/InvalidTokenModal.tsx | 17 +- .../components/survey/submit/SurveyForm.tsx | 8 +- .../survey/submit/SurveySubmitWrapped.tsx | 5 +- met-web/src/config.ts | 5 + met-web/src/constants/language.ts | 7 + met-web/src/locales/en/default.json | 265 +++++++++++++++- met-web/src/locales/en/eao.json | 29 -- met-web/src/locales/en/gdx.json | 284 ++++++++++++++++++ met-web/src/reduxSlices/languageSlice.ts | 33 ++ met-web/src/routes/NotAvailable.tsx | 12 +- met-web/src/routes/NotFound.tsx | 104 ++++--- met-web/src/services/userService/index.ts | 4 +- met-web/src/store.ts | 2 + .../unit/components/FormCAC/FormCAC.test.tsx | 21 ++ 
.../landingPage/LandingPage.test.tsx | 12 +- .../components/layout/PublicHeader.test.tsx | 53 ++++ .../publicDashboard/ProjectLocation.test.tsx | 15 + .../publicDashboard/PublicDashboard.test.tsx | 33 +- .../publicDashboard/SubmissionTrend.test.tsx | 16 + .../publicDashboard/SurveyEmailsSent.test.tsx | 12 + .../publicDashboard/SurveysCompleted.test.tsx | 12 + 55 files changed, 1300 insertions(+), 321 deletions(-) create mode 100644 met-web/src/components/common/LanguageSelector.tsx create mode 100644 met-web/src/constants/language.ts delete mode 100644 met-web/src/locales/en/eao.json create mode 100644 met-web/src/locales/en/gdx.json create mode 100644 met-web/src/reduxSlices/languageSlice.ts create mode 100644 met-web/tests/unit/components/layout/PublicHeader.test.tsx diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 741973b95..f02e9400a 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,4 +1,10 @@ -<<<<<<< HEAD +## March 19, 2024 + +- **Task**: Change static english text to be able to support string translations [DESENG-467](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-467) + - Implemented a language selector in the public header. + - Incorporated logic to dynamically adjust the unauthenticated route based on the selected language and load the appropriate translation file. + - Enhanced all public pages to fetch static text from the translation file. + ## March 15, 2024 - **Task**: Multi-language - Create event, subcribe_item, poll, timeline widget translation tables & API routes [DESENG-515](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-515) @@ -9,7 +15,6 @@ - Added Unit tests. - **Task** Add "Results" page to engagements [DESENG-512](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-512) - ## March 08, 2024 - **Task**: Multi-language - Create engagement translation table & API routes [DESENG-510](https://apps.itsm.gov.bc.ca/jira/browse/DESENG-510) diff --git a/met-web/sample.env b/met-web/sample.env index 30b6433ba..21222f3aa 100644 --- a/met-web/sample.env +++ b/met-web/sample.env @@ -28,6 +28,9 @@ REACT_APP_ANALYTICS_API_URL=http://localhost:5001/api # Users visiting the root URL will be redirected to this tenant REACT_APP_DEFAULT_TENANT=gdx +# Users visiting the root URL will be redirected to this language of default tenant +REACT_APP_DEFAULT_LANGUAGE_ID=en + # Whether to skip certain auth checks. Should be false in production. # Must match the value set for IS_SINGLE_TENANT_ENVIRONMENT in the API. 
REACT_APP_IS_SINGLE_TENANT_ENVIRONMENT=false \ No newline at end of file diff --git a/met-web/src/App.tsx b/met-web/src/App.tsx index f74512a21..5aa53f544 100644 --- a/met-web/src/App.tsx +++ b/met-web/src/App.tsx @@ -1,4 +1,4 @@ -import React, { useEffect } from 'react'; +import React, { useEffect, useState } from 'react'; import './App.scss'; import { Route, BrowserRouter as Router, Routes } from 'react-router-dom'; import UserService from './services/userService'; @@ -20,9 +20,15 @@ import NotFound from 'routes/NotFound'; import Footer from 'components/layout/Footer'; import { ZIndex } from 'styles/Theme'; import { TenantState, loadingTenant, saveTenant } from 'reduxSlices/tenantSlice'; +import { LanguageState } from 'reduxSlices/languageSlice'; import { openNotification } from 'services/notificationService/notificationSlice'; import i18n from './i18n'; import DocumentTitle from 'DocumentTitle'; +import { Language } from 'constants/language'; + +interface Translations { + [languageId: string]: { [key: string]: string }; +} const App = () => { const drawerWidth = 280; @@ -32,9 +38,10 @@ const App = () => { const isLoggedIn = useAppSelector((state) => state.user.authentication.authenticated); const authenticationLoading = useAppSelector((state) => state.user.authentication.loading); const pathSegments = window.location.pathname.split('/'); - const language = 'en'; // Default language is English, change as needed + const language: LanguageState = useAppSelector((state) => state.language); const basename = pathSegments[1].toLowerCase(); const tenant: TenantState = useAppSelector((state) => state.tenant); + const [translations, setTranslations] = useState({}); useEffect(() => { UserService.initKeycloak(dispatch); @@ -82,36 +89,70 @@ const App = () => { if (basename) { fetchTenant(basename); + if (pathSegments.length === 2) { + const defaultLanguage = AppConfig.language.defaultLanguageId; // Set the default language here + const defaultUrl = `/${basename}/${defaultLanguage}`; + window.location.replace(defaultUrl); + } return; } if (!basename && AppConfig.tenant.defaultTenant) { - window.location.replace(`/${AppConfig.tenant.defaultTenant}`); + const defaultLanguage = AppConfig.language.defaultLanguageId; // Set the default language here + const defaultUrl = `/${AppConfig.tenant.defaultTenant}/${defaultLanguage}`; + window.location.replace(defaultUrl); } dispatch(loadingTenant(false)); }; - const getTranslationFile = async () => { + const preloadTranslations = async () => { + if (!tenant.id) { + return; + } + + try { + const supportedLanguages = Object.values(Language); + const translationPromises = supportedLanguages.map((languageId) => getTranslationFile(languageId)); + const translationFiles = await Promise.all(translationPromises); + + const translationsObj: Translations = {}; + + translationFiles.forEach((file, index) => { + if (file) { + translationsObj[supportedLanguages[index]] = file.default; + } + }); + + setTranslations(translationsObj); + } catch (error) { + console.error('Error preloading translations:', error); + } + }; + + const getTranslationFile = async (languageId: string) => { try { - const translationFile = await import(`./locales/${language}/${tenant.id}.json`); + const translationFile = await import(`./locales/${languageId}/${tenant.id}.json`); return translationFile; } catch (error) { - const defaultTranslationFile = await import(`./locales/${language}/default.json`); + const defaultTranslationFile = await import(`./locales/${languageId}/default.json`); return 
defaultTranslationFile; } }; + useEffect(() => { + preloadTranslations(); + }, [tenant.id]); // Preload translations when tenant id changes + const loadTranslation = async () => { - if (!tenant.id) { + if (!tenant.id || !translations[language.id]) { return; } - i18n.changeLanguage(language); // Set the language for react-i18next + i18n.changeLanguage(language.id); // Set the language for react-i18next try { - const translationFile = await getTranslationFile(); - i18n.addResourceBundle(language, tenant.id, translationFile); + i18n.addResourceBundle(language.id, tenant.id, translations[language.id]); dispatch(loadingTenant(false)); } catch (error) { dispatch(loadingTenant(false)); @@ -126,7 +167,7 @@ const App = () => { useEffect(() => { loadTranslation(); - }, [tenant.id]); + }, [language.id, translations]); if (authenticationLoading || tenant.loading) { return ; @@ -151,7 +192,9 @@ const App = () => { - + + } /> +