diff --git a/.github/workflows/test_bi_scheduler.yaml b/.github/workflows/test_bi_scheduler.yaml index 9195f092..1a709a64 100644 --- a/.github/workflows/test_bi_scheduler.yaml +++ b/.github/workflows/test_bi_scheduler.yaml @@ -17,4 +17,6 @@ jobs: - name: Build image run: bash -x build.sh bi_scheduler build - name: Build test image and run tests - run: bash -x docker_images/bi_scheduler/run_test.sh \ No newline at end of file + run: bash -x docker_images/run_test.sh bi_scheduler + - name: Build test image and run unit tests + run: bash -x docker_images/bi_scheduler/run_test.sh diff --git a/.github/workflows/test_booking_observer.yaml b/.github/workflows/test_booking_observer.yaml index 20cd9c5a..9c50ce45 100644 --- a/.github/workflows/test_booking_observer.yaml +++ b/.github/workflows/test_booking_observer.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh booking_observer build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh booking_observer diff --git a/.github/workflows/test_calendar_observer.yaml b/.github/workflows/test_calendar_observer.yaml index 95b0789c..74509bcd 100644 --- a/.github/workflows/test_calendar_observer.yaml +++ b/.github/workflows/test_calendar_observer.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh calendar_observer build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh calendar_observer diff --git a/.github/workflows/test_cleaninfluxdb.yaml b/.github/workflows/test_cleaninfluxdb.yaml index 5c063d3e..674f0728 100644 --- a/.github/workflows/test_cleaninfluxdb.yaml +++ b/.github/workflows/test_cleaninfluxdb.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh cleaninfluxdb build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh cleaninfluxdb *.py diff --git a/.github/workflows/test_cleanmongodb.yaml 
b/.github/workflows/test_cleanmongodb.yaml index 829ba652..cde34ffb 100644 --- a/.github/workflows/test_cleanmongodb.yaml +++ b/.github/workflows/test_cleanmongodb.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh cleanmongodb build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh cleanmongodb *.py diff --git a/.github/workflows/test_configurator.yaml b/.github/workflows/test_configurator.yaml index dbbb0b44..3fb0f78e 100644 --- a/.github/workflows/test_configurator.yaml +++ b/.github/workflows/test_configurator.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh configurator build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh configurator diff --git a/.github/workflows/test_demo_org_cleanup.yaml b/.github/workflows/test_demo_org_cleanup.yaml index 646c3b67..d0be44a3 100644 --- a/.github/workflows/test_demo_org_cleanup.yaml +++ b/.github/workflows/test_demo_org_cleanup.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh demo_org_cleanup build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh demo_org_cleanup diff --git a/.github/workflows/test_failed_imports_dataset_generator.yaml b/.github/workflows/test_failed_imports_dataset_generator.yaml index a43f08f0..eb105f76 100644 --- a/.github/workflows/test_failed_imports_dataset_generator.yaml +++ b/.github/workflows/test_failed_imports_dataset_generator.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh failed_imports_dataset_generator build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh failed_imports_dataset_generator diff --git a/.github/workflows/test_herald_executor.yaml b/.github/workflows/test_herald_executor.yaml index ac972141..7c55a073 100644 --- a/.github/workflows/test_herald_executor.yaml +++ 
b/.github/workflows/test_herald_executor.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh herald_executor build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh herald_executor diff --git a/.github/workflows/test_keeper_executor.yaml b/.github/workflows/test_keeper_executor.yaml index d8a9b8fc..cfe75083 100644 --- a/.github/workflows/test_keeper_executor.yaml +++ b/.github/workflows/test_keeper_executor.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh keeper_executor build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh keeper_executor diff --git a/.github/workflows/test_live_demo_generator.yaml b/.github/workflows/test_live_demo_generator.yaml index a91777e7..dde97d24 100644 --- a/.github/workflows/test_live_demo_generator.yaml +++ b/.github/workflows/test_live_demo_generator.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh live_demo_generator build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh live_demo_generator diff --git a/.github/workflows/test_ohsu.yaml b/.github/workflows/test_ohsu.yaml index fc460193..a6dfb10f 100644 --- a/.github/workflows/test_ohsu.yaml +++ b/.github/workflows/test_ohsu.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh ohsu build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh ohsu diff --git a/.github/workflows/test_organization_violations.yaml b/.github/workflows/test_organization_violations.yaml index 0c883f31..5e0f9fcc 100644 --- a/.github/workflows/test_organization_violations.yaml +++ b/.github/workflows/test_organization_violations.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh organization_violations build + - name: Build test image and run tests + run: bash 
-x docker_images/run_test.sh organization_violations diff --git a/.github/workflows/test_resource_discovery.yaml b/.github/workflows/test_resource_discovery.yaml index bbd2da5e..9a8fc96d 100644 --- a/.github/workflows/test_resource_discovery.yaml +++ b/.github/workflows/test_resource_discovery.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh resource_discovery build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh resource_discovery diff --git a/.github/workflows/test_resource_observer.yaml b/.github/workflows/test_resource_observer.yaml index 716fb166..580b136c 100644 --- a/.github/workflows/test_resource_observer.yaml +++ b/.github/workflows/test_resource_observer.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh resource_observer build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh resource_observer diff --git a/.github/workflows/test_resource_violations.yaml b/.github/workflows/test_resource_violations.yaml index 8205ea70..9c5aa19e 100644 --- a/.github/workflows/test_resource_violations.yaml +++ b/.github/workflows/test_resource_violations.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh resource_violations build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh resource_violations diff --git a/.github/workflows/test_slacker_executor.yaml b/.github/workflows/test_slacker_executor.yaml index 34f47e4c..2bda92d6 100644 --- a/.github/workflows/test_slacker_executor.yaml +++ b/.github/workflows/test_slacker_executor.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh slacker_executor build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh slacker_executor diff --git a/.github/workflows/test_users_dataset_generator.yaml 
b/.github/workflows/test_users_dataset_generator.yaml index 0d6d40aa..30a0375f 100644 --- a/.github/workflows/test_users_dataset_generator.yaml +++ b/.github/workflows/test_users_dataset_generator.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh users_dataset_generator build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh users_dataset_generator diff --git a/.github/workflows/test_webhook_executor.yaml b/.github/workflows/test_webhook_executor.yaml index 42ba20e1..cb7c65fa 100644 --- a/.github/workflows/test_webhook_executor.yaml +++ b/.github/workflows/test_webhook_executor.yaml @@ -16,3 +16,5 @@ jobs: uses: actions/checkout@v3 - name: Build image run: bash -x build.sh webhook_executor build + - name: Build test image and run tests + run: bash -x docker_images/run_test.sh webhook_executor diff --git a/docker_images/.pylintrc b/docker_images/.pylintrc new file mode 100644 index 00000000..4585034d --- /dev/null +++ b/docker_images/.pylintrc @@ -0,0 +1,10 @@ +[REPORTS] +reports=no + +[MESSAGES CONTROL] +disable=R,C0111,C0103 + +[BASIC] +good-names=i, db, id +generated-members=deleted, id, state, metadata + diff --git a/docker_images/Dockerfile_tests b/docker_images/Dockerfile_tests new file mode 100644 index 00000000..47f56ec1 --- /dev/null +++ b/docker_images/Dockerfile_tests @@ -0,0 +1,7 @@ +ARG BUILDTAG=build +ARG IMAGE= +FROM ${IMAGE}:${BUILDTAG} +LABEL org.opencontainers.image.authors="Hystax" + +COPY docker_images/test-requirements.txt docker_images/.pylintrc ./ +RUN pip install --no-cache-dir -r test-requirements.txt diff --git a/docker_images/booking_observer/worker.py b/docker_images/booking_observer/worker.py index 95858213..a1ce9a79 100644 --- a/docker_images/booking_observer/worker.py +++ b/docker_images/booking_observer/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import requests import time from pymongo import MongoClient, UpdateOne from datetime import 
datetime @@ -11,7 +10,7 @@ from kombu.utils.debug import setup_logging from kombu import Connection as QConnection, Exchange, Queue from kombu.pools import producers -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -119,8 +118,8 @@ def _process(self, organization_id, observe_time): def _publish_activities_tasks(self, tasks): queue_conn = QConnection('amqp://{user}:{pass}@{host}:{port}'.format( - **self.config_cl.read_branch('/rabbit')), - transport_options=RETRY_POLICY) + **self.config_cl.read_branch('/rabbit')), + transport_options=RETRY_POLICY) task_exchange = Exchange(ACTIVITIES_EXCHANGE_NAME, type='topic') with producers[queue_conn].acquire(block=True) as producer: for task_params in tasks: @@ -149,7 +148,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/calendar_observer/worker.py b/docker_images/calendar_observer/worker.py index bf0de3a4..4a51989a 100644 --- a/docker_images/calendar_observer/worker.py +++ b/docker_images/calendar_observer/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import requests import time from threading import Thread @@ -10,7 +9,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -69,7 +68,7 @@ def heartbeat(self): if __name__ == 
'__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'DEBUG' if debug else 'INFO' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/cleanmongodb/clean-mongo-db.py b/docker_images/cleanmongodb/clean-mongo-db.py index 4500b0f0..8f70dc08 100644 --- a/docker_images/cleanmongodb/clean-mongo-db.py +++ b/docker_images/cleanmongodb/clean-mongo-db.py @@ -81,7 +81,7 @@ def update_cleaned_at(self, cloud_account_id): session.close() @staticmethod - def delete_rows(collection, cloud_account_id, chunk_size, rows_limit): + def delete_rows(collection, cloud_account_id, chunk_size, rows_limit): row_ids = list(collection.find( {'cloud_account_id': cloud_account_id}, ['_id']).limit(rows_limit)) for j in range(0, len(row_ids), chunk_size): diff --git a/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py b/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py index d9ebb7f8..0a987089 100644 --- a/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py +++ b/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py @@ -104,7 +104,7 @@ def _get_failed_cloud_accounts(mydb): ca_t.last_import_at, ca_t.last_import_attempt_error, ca_t.organization_id, org_t.name FROM ( - SELECT id, name, organization_id, type, created_at, last_import_at, + SELECT id, name, organization_id, type, created_at, last_import_at, last_import_attempt_error FROM cloudaccount WHERE deleted_at=0 diff --git a/docker_images/herald_executor/worker.py b/docker_images/herald_executor/worker.py index afc6f9b1..5a85ef6b 100644 --- a/docker_images/herald_executor/worker.py +++ b/docker_images/herald_executor/worker.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import calendar import os -import requests import time import uuid from enum import 
Enum @@ -13,7 +12,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue, binding -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -673,8 +672,8 @@ def _get_org_constraint_link(self, constraint, created_at, filters): end_date = datetime.combine(created, created.time().max) + timedelta( days=1) elif constraint['type'] == 'expiring_budget': - start_date = datetime.utcfromtimestamp(constraint['definition'][ - 'start_date']) + start_date = datetime.utcfromtimestamp( + constraint['definition']['start_date']) end_date = None elif constraint['type'] == 'recurring_budget': start_date = created.replace(day=1, hour=0, minute=0, second=0) @@ -741,21 +740,21 @@ def _get_org_constraint_template_params(self, organization, constraint, if constraint['type'] not in resource_types else round(latest_hit['value'])) params = { - 'texts': { - 'title': title, - 'organization': self._get_organization_params(organization), - 'organization_constraint': {**constraint_data}, - 'limit_hit': { - 'created_at': hit_date, - 'value': value, - 'link': link, - 'constraint_limit': constraint_limit - }, - 'user': { - 'user_display_name': user_info.get('display_name') - }, - } + 'texts': { + 'title': title, + 'organization': self._get_organization_params(organization), + 'organization_constraint': {**constraint_data}, + 'limit_hit': { + 'created_at': hit_date, + 'value': value, + 'link': link, + 'constraint_limit': constraint_limit + }, + 'user': { + 'user_display_name': user_info.get('display_name') + }, } + } if constraint['type'] == 'tagging_policy': tag = constraint['definition']['conditions'].get('tag') without_tag = constraint['definition']['conditions'].get('without_tag') @@ -966,7 +965,7 @@ def heartbeat(self): if __name__ == '__main__': - 
requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'DEBUG' if debug else 'INFO' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/keeper_executor/worker.py b/docker_images/keeper_executor/worker.py index c187be26..b0fca4c9 100644 --- a/docker_images/keeper_executor/worker.py +++ b/docker_images/keeper_executor/worker.py @@ -8,7 +8,7 @@ from kombu.log import get_logger from kombu.utils.debug import setup_logging from kombu import Exchange, Queue, binding, Connection -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -626,7 +626,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'DEBUG' if debug else 'INFO' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/live_demo_generator/scheduler.py b/docker_images/live_demo_generator/scheduler.py index 68442d04..7728aa00 100644 --- a/docker_images/live_demo_generator/scheduler.py +++ b/docker_images/live_demo_generator/scheduler.py @@ -44,7 +44,7 @@ def main(config_cl): }) if count > 0: publish_tasks(count) - LOG.info('Published %s tasks' % count ) + LOG.info('Published %s tasks' % count) deleted = live_demos_collection.delete_many({ 'created_at': {'$lt': int(dt.timestamp())} }).deleted_count diff --git a/docker_images/live_demo_generator/worker.py b/docker_images/live_demo_generator/worker.py index aee13287..a305a7fc 100644 --- a/docker_images/live_demo_generator/worker.py +++ b/docker_images/live_demo_generator/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env 
python import os -import requests import time from threading import Thread @@ -10,7 +9,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from datetime import datetime from optscale_client.config_client.client import Client as ConfigClient @@ -72,11 +71,11 @@ def generate_live_demo(self): response['created_at'] = int(d_start.timestamp()) self.mongo_cl.restapi.live_demos.insert_one(response) LOG.info('Live demo generated in %s seconds' % ( - datetime.utcnow() - d_start).total_seconds()) + datetime.utcnow() - d_start).total_seconds()) if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/ohsu/handlers/v2/base.py b/docker_images/ohsu/handlers/v2/base.py index af775db0..f78b0e0d 100644 --- a/docker_images/ohsu/handlers/v2/base.py +++ b/docker_images/ohsu/handlers/v2/base.py @@ -30,7 +30,7 @@ def initialize(self, config): self._controller = None def raise405(self): - raise OptHTTPError(405, Err.OH0002, [self.request.method]) + raise OptHTTPError(405, Err.OHE0002, [self.request.method]) def head(self, *args, **kwargs): self.raise405() diff --git a/docker_images/organization_violations/worker.py b/docker_images/organization_violations/worker.py index 691f544b..9541c9cd 100644 --- a/docker_images/organization_violations/worker.py +++ b/docker_images/organization_violations/worker.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import calendar import os -import requests import time import uuid from copy import deepcopy @@ -14,7 +13,7 @@ from kombu import Connection as QConnection from kombu import Exchange, Queue from kombu.pools import producers -from 
requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -517,7 +516,7 @@ def process_organization_constraints(self, task): self.month_start(date), start.time())).days + 1, # hit_days for resource_quota and anomalies constraints (start - datetime.combine( - date - timedelta(days=1), start.time())).days + 1] + date - timedelta(days=1), start.time())).days + 1] _, response = self.rest_cl.organization_constraint_list( org_id, hit_days=max(hit_days)) constraints = response.get('organization_constraints') @@ -536,8 +535,8 @@ def process_organization_constraints(self, task): 'Error: %s' % (constr['id'], str(exc))) self.publish_activities_tasks(notif_tasks) LOG.info('Organization violation process for organization %s completed' - ' in %s seconds' % ( - org_id, int(datetime.utcnow().timestamp()) - start_ts)) + ' in %s seconds' % + (org_id, int(datetime.utcnow().timestamp()) - start_ts)) def process_task(self, body, message): try: @@ -553,7 +552,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/resource_discovery/scheduler.py b/docker_images/resource_discovery/scheduler.py index 68e814bf..8ee6f253 100644 --- a/docker_images/resource_discovery/scheduler.py +++ b/docker_images/resource_discovery/scheduler.py @@ -123,7 +123,7 @@ def process(config_cl): (ca['id'], di_info['resource_type'])) except requests.exceptions.HTTPError as ex: LOG.error( - 'Failed to publish %s tasks for cloud account %s: %s', + 'Failed to publish tasks for cloud account %s: %s', ca['id'], str(ex)) continue return tasks_map 
diff --git a/docker_images/resource_discovery/worker.py b/docker_images/resource_discovery/worker.py index 4fe3f2f7..463a9fd5 100644 --- a/docker_images/resource_discovery/worker.py +++ b/docker_images/resource_discovery/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import requests import time import traceback @@ -13,7 +12,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from tools.cloud_adapter.cloud import Cloud as CloudAdapter from tools.cloud_adapter.exceptions import InvalidResourceTypeException @@ -136,6 +135,7 @@ def save_bulk_resources(self, resources, resource_type, cloud_acc_id): for resource in resources: resource.post_discover() + class DiscoveryWorker(ConsumerMixin): def __init__(self, connection, config_cl): self.connection = connection @@ -272,7 +272,8 @@ def _discover_resources(self, cloud_acc_id, resource_type): if isinstance(res, Exception): if self.is_404(res): continue - LOG.error("Exception: % %", str(res), traceback.print_tb(res.__traceback__)) + LOG.error("Exception: %s %s", str(res), + traceback.print_tb(res.__traceback__)) gen_list_chunk.remove(gen) errors.add(str(res)) elif res: @@ -342,7 +343,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/resource_observer/requirements.txt b/docker_images/resource_observer/requirements.txt index e04a0344..e78edefc 100644 --- a/docker_images/resource_observer/requirements.txt +++ b/docker_images/resource_observer/requirements.txt @@ -2,4 +2,4 @@ kombu==4.6.8 # OptScale packages -e optscale_client/config_client --e 
optscale_client/rest_api_client \ No newline at end of file +-e optscale_client/rest_api_client diff --git a/docker_images/resource_observer/worker.py b/docker_images/resource_observer/worker.py index 162fc577..c7848515 100644 --- a/docker_images/resource_observer/worker.py +++ b/docker_images/resource_observer/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import requests import time from threading import Thread @@ -10,7 +9,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -69,7 +68,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/resource_violations/worker.py b/docker_images/resource_violations/worker.py index 4e96a0a6..3f3f7cb4 100644 --- a/docker_images/resource_violations/worker.py +++ b/docker_images/resource_violations/worker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -import requests import time from threading import Thread @@ -10,7 +9,7 @@ from kombu import Connection from kombu.utils.debug import setup_logging from kombu import Exchange, Queue -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -69,7 +68,7 @@ def heartbeat(self): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + 
urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning) debug = os.environ.get('DEBUG', False) log_level = 'INFO' if not debug else 'DEBUG' setup_logging(loglevel=log_level, loggers=['']) diff --git a/docker_images/run_test.sh b/docker_images/run_test.sh new file mode 100755 index 00000000..290a541d --- /dev/null +++ b/docker_images/run_test.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -e +BUILD_TAG='build' +IMAGE_NAME=$1 +DIR=${2:-'docker_images'} + +TEST_IMAGE=${IMAGE_NAME}_tests:${BUILD_TAG} +docker build -t ${TEST_IMAGE} --build-arg IMAGE=$1 -f docker_images/Dockerfile_tests . + +echo "PEP8 tests>>>" +docker run -i --rm ${TEST_IMAGE} bash -c "pep8 --max-line-length=120 --ignore=E701 ./$DIR" +echo "<<<PEP8 tests" + +echo "Pylint tests>>>" +docker run -i --rm ${TEST_IMAGE} bash -c "pylint --rcfile=.pylintrc ./$DIR; exit \$(( \$? & 3 ))" +echo "<<<Pylint tests" + +docker rmi ${TEST_IMAGE} diff --git a/katara/katara_service/handlers/v2/recipients.py b/katara/katara_service/handlers/v2/recipients.py --- a/katara/katara_service/handlers/v2/recipients.py +++ b/katara/katara_service/handlers/v2/recipients.py @@ -71,9 +73,9 @@ async def get(self, id): security: - secret: [] """ - await super().get(id) + await super().get(recipient_id) - async def patch(self, id, **kwargs): + async def patch(self, recipient_id, **kwargs): """ --- tags: [recipients] @@ -140,9 +142,9 @@ async def patch(self, id, **kwargs): security: - secret: [] """ - await super().patch(id, **kwargs) + await super().patch(recipient_id, **kwargs) - async def delete(self, id, **kwargs): + async def delete(self, _recipient_id, **kwargs): self.raise405() @@ -181,9 +183,11 @@ async def get(self, **kwargs): id: {type: string, description: "Unique recipient id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: + "Created timestamp (service field)"} role_purpose: {type: string, - description: "Role purpose of recipient"} + description: + "Role purpose of recipient"} scope_id: {type: string, description: "Recipient scope id"} user_id: {type: string, @@ -230,11 +234,13 @@ async def post(self, **url_params): type: object properties: role_purpose: {type: string, - description: "Role purpose of recipient. 
User id field must not be filled."} + description: "Role purpose of recipient. + User id field must not be filled."} scope_id: {type: string, description: "Recipient scope id"} user_id: {type: string, - description: "Recipient user id. Role Purpose field must not be filled."} + description: "Recipient user id. Role Purpose field + must not be filled."} meta: {type: string, description: "Recipient metadata"} responses: diff --git a/katara/katara_service/handlers/v2/reports.py b/katara/katara_service/handlers/v2/reports.py index 48ea3a27..fe2c068c 100644 --- a/katara/katara_service/handlers/v2/reports.py +++ b/katara/katara_service/handlers/v2/reports.py @@ -37,7 +37,8 @@ async def get(self, **kwargs): id: {type: string, description: "Unique report id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: + "Created timestamp (service field)"} name: {type: string, description: "Report name"} module_name: {type: string, @@ -69,13 +70,13 @@ class ReportAsyncItemHandler(BaseAsyncItemHandler): def _get_controller_class(self): return ReportAsyncController - async def delete(self, id, **kwargs): + async def delete(self, _report_id, **kwargs): await self.raise405() - async def patch(self, id, **kwargs): + async def patch(self, _report_id, **kwargs): self.raise405() - async def get(self, id): + async def get(self, report_id): """ --- description: > @@ -124,4 +125,4 @@ async def get(self, id): security: - secret: [] """ - await super().get(id) + await super().get(report_id) diff --git a/katara/katara_service/handlers/v2/schedules.py b/katara/katara_service/handlers/v2/schedules.py index 5e53b2c6..ef68c241 100644 --- a/katara/katara_service/handlers/v2/schedules.py +++ b/katara/katara_service/handlers/v2/schedules.py @@ -17,7 +17,7 @@ class ScheduleAsyncItemHandler(BaseAsyncItemHandler): def _get_controller_class(self): return ScheduleAsyncController - async def get(self, id): + async def get(self, schedule_id): """ --- description: > 
@@ -48,9 +48,11 @@ async def get(self, id): crontab: {type: string, description: "Schedule in crontab format"} last_run: {type: integer, - description: "Last job run timestamp (service field)"} + description: + "Last job run timestamp (service field)"} next_run: {type: integer, - description: "Next job run timestamp (service field)"} + description: + "Next job run timestamp (service field)"} 401: description: | Unauthorized: @@ -66,9 +68,9 @@ async def get(self, id): security: - secret: [] """ - await super().get(id) + await super().get(schedule_id) - async def patch(self, id, **kwargs): + async def patch(self, schedule_id, **kwargs): """ --- tags: [schedules] @@ -120,9 +122,9 @@ async def patch(self, id, **kwargs): security: - secret: [] """ - await super().patch(id, **kwargs) + await super().patch(schedule_id, **kwargs) - async def delete(self, id, **kwargs): + async def delete(self, schedule_id, **kwargs): """ --- tags: [schedules] @@ -153,7 +155,7 @@ async def delete(self, id, **kwargs): security: - secret: [] """ - await super().delete(id, **kwargs) + await super().delete(schedule_id, **kwargs) class ScheduleAsyncCollectionHandler(BaseAsyncCollectionHandler): @@ -206,7 +208,8 @@ async def post(self, **url_params): 409: description: | Conflict: - - OKA0005: Report is already scheduled for recipient with rule + - OKA0005: Report is already scheduled for recipient + with rule security: - secret: [] """ @@ -245,17 +248,23 @@ async def get(self, **kwargs): id: {type: string, description: "Unique schedule id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: + "Created timestamp (service field)"} report_id: {type: string, description: "Report id"} recipient_id: {type: string, description: "Recipient id"} crontab: {type: string, - description: "Schedule in crontab format"} + description: + "Schedule in crontab format"} last_run: {type: integer, - description: "Last job run timestamp (service field)"} + description: + 
"Last job run timestamp + (service field)"} next_run: {type: integer, - description: "Next job run timestamp (service field)"} + description: + "Next job run timestamp ( + service field)"} 400: description: | Wrong arguments: diff --git a/katara/katara_service/handlers/v2/tasks.py b/katara/katara_service/handlers/v2/tasks.py index 3870804c..16d9a3fc 100644 --- a/katara/katara_service/handlers/v2/tasks.py +++ b/katara/katara_service/handlers/v2/tasks.py @@ -1,13 +1,5 @@ import json -from katara.katara_service.controllers.task import TaskAsyncController -from katara.katara_service.exceptions import Err -from katara.katara_service.handlers.v2.base import ( - BaseAsyncItemHandler, - BaseAsyncCollectionHandler -) -from katara.katara_service.utils import strtobool, check_list_attribute, ModelEncoder - from tools.optscale_exceptions.common_exc import ( WrongArgumentsException, NotFoundException, @@ -17,15 +9,24 @@ ) from tools.optscale_exceptions.http_exc import OptHTTPError +from katara.katara_service.controllers.task import TaskAsyncController +from katara.katara_service.exceptions import Err +from katara.katara_service.handlers.v2.base import ( + BaseAsyncItemHandler, + BaseAsyncCollectionHandler +) +from katara.katara_service.utils import ( + strtobool, check_list_attribute, ModelEncoder) + class TaskAsyncItemHandler(BaseAsyncItemHandler): def _get_controller_class(self): return TaskAsyncController - async def delete(self, id, **kwargs): + async def delete(self, _task_id, **kwargs): self.raise405() - async def get(self, id): + async def get(self, task_id): """ --- description: > @@ -61,28 +62,35 @@ async def get(self, id): result: {type: string, description: "Task result (report data)"} schedule_id: {type: string, - description: "Schedule id (part of simple response)"} + description: "Schedule id + (part of simple response)"} schedule: type: object - description: "Task schedule (part of expanded response)" + description: "Task schedule + (part of expanded response)" 
properties: id: {type: string, description: "Unique schedule id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: "Created timestamp + (service field)"} crontab: {type: string, description: "Schedule in crontab format"} last_run: {type: integer, - description: "Last job run timestamp (service field)"} + description: "Last job run timestamp + (service field)"} next_run: {type: integer, - description: "Next job run timestamp (service field)"} + description: "Next job run timestamp + (service field)"} report: type: object properties: id: {type: string, description: "Unique report id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: + "Created timestamp + (service field)"} name: {type: string, description: "Report name"} module_name: {type: string, @@ -99,9 +107,11 @@ async def get(self, id): id: {type: string, description: "Unique recipient id"} created_at: {type: integer, - description: "Created timestamp (service field)"} + description: "Created timestamp + (service field)"} role_purpose: {type: string, - description: "Role purpose of recipient"} + description: "Role purpose of + recipient"} scope_id: {type: string, description: "Recipient scope id"} user_id: {type: string, @@ -132,10 +142,10 @@ async def get(self, id): expanded = strtobool(expanded) except ValueError: raise OptHTTPError(400, Err.OKA0026, ['Expanded']) - item = await self._get_item(id) + item = await self._get_item(task_id) self.write(item.to_json(expanded)) - async def patch(self, id, **kwargs): + async def patch(self, task_id, **kwargs): """ --- tags: [tasks] @@ -187,7 +197,7 @@ async def patch(self, id, **kwargs): security: - secret: [] """ - await super().patch(id, **kwargs) + await super().patch(task_id, **kwargs) class TaskAsyncCollectionHandler(BaseAsyncCollectionHandler): @@ -266,4 +276,5 @@ async def post(self, **url_params): except UnauthorizedException as ex: raise 
OptHTTPError.from_opt_exception(401, ex) self.set_status(201) - self.write(json.dumps([item.to_dict() for item in items], cls=ModelEncoder)) + self.write(json.dumps([item.to_dict() for item in items], + cls=ModelEncoder)) diff --git a/katara/katara_service/main.py b/katara/katara_service/main.py index 609165da..0a8852d8 100644 --- a/katara/katara_service/main.py +++ b/katara/katara_service/main.py @@ -35,6 +35,7 @@ def __str__(self): def get_handlers(handler_kwargs): + # pylint: disable=E1101 return [ (urls_v2.tasks, h_v2.tasks.TaskAsyncItemHandler, handler_kwargs), @@ -61,10 +62,12 @@ def setup_api(db, config_cl): "engine": db.engine, "config": config_cl, } + # pylint: disable=E1101 + swagger_handler = [ + (urls_v2.swagger, h_v2.swagger.SwaggerStaticFileHandler, + {'path': SWAGGER_PATH, 'default_filename': 'index.html'})] app = tornado.web.Application( - get_handlers(handler_kwargs) + - [(urls_v2.swagger, h_v2.swagger.SwaggerStaticFileHandler, - {'path': SWAGGER_PATH, 'default_filename': 'index.html'})], + get_handlers(handler_kwargs) + swagger_handler, default_handler_class=h_v2.base.DefaultHandler ) config_cl.tell_everybody_that_i_am_ready() @@ -91,7 +94,8 @@ def make_app(db_type, role, etcd_host, etcd_port, wait=False): Roles.scheduler: setup_scheduler, } setup_func = applications_map.get(role) - config_cl = optscale_client.config_client.client.Client(host=etcd_host, port=etcd_port) + config_cl = optscale_client.config_client.client.Client( + host=etcd_host, port=etcd_port) if wait: config_cl.wait_configured() @@ -113,7 +117,7 @@ def main(): default=role) args = parser.parse_args() - app = make_app(DBType.MySQL, args.role, args.etcdhost, + app = make_app(DBType.MYSQL, args.role, args.etcdhost, args.etcdport, wait=True) if isinstance(app, tornado.ioloop.PeriodicCallback): LOG.info("Starting periodic") diff --git a/katara/katara_service/migrate.py b/katara/katara_service/migrate.py index a583f17e..c094e134 100644 --- a/katara/katara_service/migrate.py +++ 
b/katara/katara_service/migrate.py @@ -30,7 +30,8 @@ def save(self, host, username, password, db, file_name='alembic.ini'): 'alembic', 'sqlalchemy.url', ConfigTemplate.connection_string % (username, password, host, db)) - with open(os.path.join(self.path, file_name), 'w') as fh: + with open(os.path.join(self.path, file_name), 'w', + encoding='utf-8') as fh: config.write(fh) @@ -47,7 +48,7 @@ def execute(cmd, path='..'): def migrate(args): template = ConfigTemplate() template.save(args.host, args.username, args.password, args.dbname) - cmd = ['alembic', 'revision', '--autogenerate', '-m', '"%s"' % args.name] + cmd = ['alembic', 'revision', '--autogenerate', '-m', f'"{args.name}"'] execute(cmd) diff --git a/katara/katara_service/models/db_base.py b/katara/katara_service/models/db_base.py index 20b4f43e..415d72ad 100644 --- a/katara/katara_service/models/db_base.py +++ b/katara/katara_service/models/db_base.py @@ -4,7 +4,7 @@ import katara.katara_service.models.models as model_base -def should_retry(exception): +def should_retry(_exception): return True diff --git a/katara/katara_service/models/db_factory.py b/katara/katara_service/models/db_factory.py index 777fa0a5..a242cfbb 100644 --- a/katara/katara_service/models/db_factory.py +++ b/katara/katara_service/models/db_factory.py @@ -9,8 +9,8 @@ class DBType(Enum): - Test = "test" - MySQL = "mysql" + TEST = "test" + MYSQL = "mysql" LOG = logging.getLogger(__name__) @@ -18,10 +18,11 @@ class DBType(Enum): class DBFactory: DBS = { - DBType.Test: TestDB, - DBType.MySQL: MySQLDB + DBType.TEST: TestDB, + DBType.MYSQL: MySQLDB } _instances = {} + _db = None @staticmethod def _get_db(db_type, config): diff --git a/katara/katara_service/models/db_mysql.py b/katara/katara_service/models/db_mysql.py index f92b6f3d..1c866e01 100644 --- a/katara/katara_service/models/db_mysql.py +++ b/katara/katara_service/models/db_mysql.py @@ -6,9 +6,10 @@ class MySQLDB(BaseDB): def _get_engine(self): + user, password, host, db = 
self._config.katara_db_params() return create_engine( - 'mysql+mysqlconnector://%s:%s@%s/%s?charset=utf8mb4' % - self._config.katara_db_params(), + f'mysql+mysqlconnector://{user}:{password}@{host}/{db}' + f'?charset=utf8mb4', # inactive connections are invalidated in ~10 minutes (600 seconds) pool_recycle=500, pool_size=200, diff --git a/katara/katara_service/models/migrator.py b/katara/katara_service/models/migrator.py index 2c98c2f7..7d9b259a 100644 --- a/katara/katara_service/models/migrator.py +++ b/katara/katara_service/models/migrator.py @@ -32,11 +32,11 @@ def __init__(self, engine): self.alembic_env = EnvironmentContext(self.alembic_cfg, self.alembic_script) - def do_upgrade(self, revision, context): + def do_upgrade(self, revision, _context): """ - Do upgrade for alembic coontext + Do upgrade for alembic context :param revision: - :param context: + :param _context: :return: """ return self.alembic_script._upgrade_revs( diff --git a/katara/katara_service/models/models.py b/katara/katara_service/models/models.py index a859101e..9f1128df 100644 --- a/katara/katara_service/models/models.py +++ b/katara/katara_service/models/models.py @@ -1,9 +1,10 @@ -import croniter -from datetime import datetime import enum import json +from datetime import datetime -from sqlalchemy import Column, Integer, String, Enum, TEXT, ForeignKey +import croniter + +from sqlalchemy import Column, Integer, String, Enum, TEXT, ForeignKey, Table from sqlalchemy import inspect, UniqueConstraint, or_ from sqlalchemy.ext.declarative import declarative_base, declared_attr from sqlalchemy.ext.declarative.base import _declarative_constructor @@ -58,6 +59,9 @@ class ColumnPermissions(Enum): class Base(object): + __name__: str + __table__: Table + def __init__(self, **kwargs): init_columns = list(filter(lambda x: x.info.get( PermissionKeys.is_creatable) is True, self.__table__.c)) @@ -67,10 +71,8 @@ def __init__(self, **kwargs): _declarative_constructor(self, **kwargs) @declared_attr - # 
pylint: disable=E0213 - def __tablename__(cls): - # pylint: disable=E1101 - return cls.__name__.lower() + def __tablename__(self): + return self.__name__.lower() def to_dict(self, expanded=False): mapper = inspect(self).mapper @@ -100,6 +102,7 @@ class BaseModel(object): id = Column(String(36), primary_key=True, default=gen_id) created_at = Column(Integer, default=get_current_timestamp, nullable=False) + __table__: Table @hybrid_property def unique_fields(self): @@ -215,7 +218,8 @@ class Report(Base, BaseModel): class Task(Base, BaseModel): __tablename__ = 'task' - schedule_id = Column(String(36), ForeignKey('schedule.id', ondelete='SET NULL'), + schedule_id = Column(String(36), ForeignKey('schedule.id', + ondelete='SET NULL'), nullable=True, info=ColumnPermissions.create_only) schedule = relationship('Schedule') completed_at = Column(Integer, nullable=True) diff --git a/katara/katara_service/run_test.sh b/katara/katara_service/run_test.sh index 08ffcf9a..f94f00d6 100755 --- a/katara/katara_service/run_test.sh +++ b/katara/katara_service/run_test.sh @@ -6,13 +6,14 @@ TEST_IMAGE=katara_service_tests:${BUILD_TAG} docker build -t ${TEST_IMAGE} --build-arg BUILDTAG=${BUILD_TAG} -f katara/katara_service/Dockerfile_tests . -echo "PEP8 tests>>>" +echo "Pycodestyle tests>>>" docker run -i --rm ${TEST_IMAGE} \ - bash -c "pep8 --max-line-length=120 --ignore=E701 ." 
-echo "<<>>" -docker run -i --rm ${TEST_IMAGE} bash -c "cd katara/katara_service && ls && pylint --rcfile=.pylintrc ./" +docker run -i --rm ${TEST_IMAGE} \ + bash -c "pylint --rcfile=katara/katara_service/.pylintrc --fail-under=9 --fail-on=E,C,F ./katara" echo "Nose tests>>>" docker run -i --rm ${TEST_IMAGE} \ diff --git a/katara/katara_service/test-requirements.txt b/katara/katara_service/test-requirements.txt index f73424c2..2378cfbe 100644 --- a/katara/katara_service/test-requirements.txt +++ b/katara/katara_service/test-requirements.txt @@ -1,7 +1,7 @@ coverage==4.2 nose==1.3.7 -pep8==1.7.1 -pylint==2.6.0 +pycodestyle==2.11.1 +pylint==3.0.2 freezegun==0.3.8 concurrencytest==0.1.2 diff --git a/katara/katara_service/tests/unittests/test_api_base.py b/katara/katara_service/tests/unittests/test_api_base.py index 4689c82e..bf6a9b02 100644 --- a/katara/katara_service/tests/unittests/test_api_base.py +++ b/katara/katara_service/tests/unittests/test_api_base.py @@ -1,13 +1,20 @@ -import tornado.testing -from unittest.mock import patch import uuid +from unittest.mock import patch + +import tornado.testing + +from optscale_client.katara_client.client import ( + FetchMethodHttpProvider, + Client as KataraClient +) -from katara.katara_service.models.models import * +from katara.katara_service.models.models import ( + Recipient, Report, ReportFormat, Schedule, Task +) from katara.katara_service.main import make_app, Roles from katara.katara_service.models.db_factory import DBType, DBFactory from katara.katara_service.models.db_base import BaseDB - -import optscale_client.katara_client.client +from katara.katara_service.utils import gen_id class TestBase(tornado.testing.AsyncHTTPTestCase): @@ -17,11 +24,11 @@ def __init__(self, *args, **kwargs): self._db_session = None def get_app(self): - return make_app(DBType.Test, Roles.api, '127.0.0.1', 80) + return make_app(DBType.TEST, Roles.api, '127.0.0.1', 80) @property def db_session(self): - db = DBFactory(DBType.Test, None).db + 
db = DBFactory(DBType.TEST, None).db engine = db.engine if not self._db_session: self._db_session = BaseDB.session(engine)() @@ -32,22 +39,20 @@ def setUp(self): secret = gen_id() patch('optscale_client.config_client.client.Client.cluster_secret', return_value=secret).start() - http_provider = optscale_client.katara_client.client.FetchMethodHttpProvider( - self.fetch, rethrow=False) - self.client = optscale_client.katara_client.client.Client( - http_provider=http_provider) + http_provider = FetchMethodHttpProvider(self.fetch, rethrow=False) + self.client = KataraClient(http_provider=http_provider) self.client.secret = secret def tearDown(self): - DBFactory.clean_type(DBType.Test) + DBFactory.clean_type(DBType.TEST) super().tearDown() def generate_reports(self, count=1): session = self.db_session reports = [] for i in range(count): - report = Report(module_name='module_%s' % i, - name='report_%s' % i, + report = Report(module_name=f'module_{i}', + name=f'report_{i}', report_format=ReportFormat.html) session.add(report) reports.append(report) @@ -57,7 +62,7 @@ def generate_reports(self, count=1): def generate_recipients(self, count=1): session = self.db_session recipients = [] - for i in range(count): + for _ in range(count): recipient = Recipient( role_purpose='optscale_manager', scope_id=str(uuid.uuid4())) diff --git a/katara/katara_service/tests/unittests/test_api_recipient.py b/katara/katara_service/tests/unittests/test_api_recipient.py index 081fe520..837057f3 100644 --- a/katara/katara_service/tests/unittests/test_api_recipient.py +++ b/katara/katara_service/tests/unittests/test_api_recipient.py @@ -5,8 +5,6 @@ class TestRecipientApi(TestBase): - def setUp(self): - super().setUp() def test_recipient_get(self): recipients = self.generate_recipients(1) @@ -15,13 +13,13 @@ def test_recipient_get(self): self.assertEqual(recipients[0].id, recipient['id']) def test_recipient_get_nonexisting(self): - id = str(uuid.uuid4()) - code, _ = self.client.recipient_get(id) + 
id_ = str(uuid.uuid4()) + code, _ = self.client.recipient_get(id_) self.assertEqual(code, 404) def test_recipient_list(self): - recipients = self.generate_recipients(3) - code, api_recipients = self.client.recipient_list(None) + self.generate_recipients(3) + code, _ = self.client.recipient_list(None) self.assertEqual(code, 400) def test_recipient_list_filtered(self): @@ -54,14 +52,14 @@ def test_recipient_delete(self): self.assertEqual(code, 404) def test_recipient_delete_nonexisting(self): - id = str(uuid.uuid4()) - delete_criteria = [id] + id_ = str(uuid.uuid4()) + delete_criteria = [id_] code, _ = self.client.recipients_delete(recipient_ids=delete_criteria) self.assertEqual(code, 204) def test_recipient_delete_nonlist_param(self): - id = str(uuid.uuid4()) - delete_criteria = id + id_ = str(uuid.uuid4()) + delete_criteria = id_ code, _ = self.client.recipients_delete(recipient_ids=delete_criteria) self.assertEqual(code, 204) @@ -106,7 +104,7 @@ def test_recipient_create_unassigned(self): payload = { "scope_id": str(uuid.uuid4()), } - code, recipient = self.client.recipient_create(**payload) + code, _ = self.client.recipient_create(**payload) self.assertEqual(code, 400) def test_recipient_create_multi_assigned(self): @@ -115,7 +113,7 @@ def test_recipient_create_multi_assigned(self): "user_id": str(uuid.uuid4()), "scope_id": str(uuid.uuid4()), } - code, recipient = self.client.recipient_create(**payload) + code, _ = self.client.recipient_create(**payload) self.assertEqual(code, 400) def test_recipient_create_wrong_porpose(self): @@ -123,7 +121,7 @@ def test_recipient_create_wrong_porpose(self): "role_purpose": 'optscale_slave', "scope_id": str(uuid.uuid4()), } - code, recipient = self.client.recipient_create(**payload) + code, _ = self.client.recipient_create(**payload) self.assertEqual(code, 400) def test_recipient_create_wrong_meta(self): diff --git a/katara/katara_service/tests/unittests/test_api_report.py 
b/katara/katara_service/tests/unittests/test_api_report.py index 88b6fa13..2f89b7a5 100644 --- a/katara/katara_service/tests/unittests/test_api_report.py +++ b/katara/katara_service/tests/unittests/test_api_report.py @@ -4,8 +4,6 @@ class TestReportApi(TestBase): - def setUp(self): - super().setUp() def test_report_get(self): reports = self.generate_reports(1) @@ -14,8 +12,8 @@ def test_report_get(self): self.assertEqual(reports[0].id, report['id']) def test_report_get_nonexisting(self): - id = str(uuid.uuid4()) - code, _ = self.client.report_get(id) + id_ = str(uuid.uuid4()) + code, _ = self.client.report_get(id_) self.assertEqual(code, 404) def test_report_list(self): diff --git a/katara/katara_service/tests/unittests/test_api_task.py b/katara/katara_service/tests/unittests/test_api_task.py index ed9cac2b..b871ad8b 100644 --- a/katara/katara_service/tests/unittests/test_api_task.py +++ b/katara/katara_service/tests/unittests/test_api_task.py @@ -6,9 +6,11 @@ from katara.katara_service.tests.unittests.test_api_base import TestBase +PUT_TASK = "katara.katara_service.controllers.schedule." 
\ + "ScheduleController.put_tasks" + + class TestTasktApi(TestBase): - def setUp(self): - super().setUp() def test_task_get(self): tasks = self.generate_tasks(1) @@ -33,8 +35,8 @@ def test_task_get_expanded_no_schedule(self): self.assertEqual(None, task['schedule']) def test_task_get_nonexisting(self): - id = str(uuid.uuid4()) - code, _ = self.client.task_get(id) + id_ = str(uuid.uuid4()) + code, _ = self.client.task_get(id_) self.assertEqual(code, 404) def test_task_update(self): @@ -76,7 +78,7 @@ def test_task_update_restriction(self): tasks[0].id, schedule_id=str(uuid.uuid4())) self.assertEqual(code, 400) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_task_create(self, p_put_tasks): schedules = self.generate_schedules(1) task_payload = { @@ -86,7 +88,7 @@ def test_task_create(self, p_put_tasks): self.assertEqual(code, 201) self.assertEqual(1, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_task_create_several(self, p_put_tasks): schedules = self.generate_schedules(2) task_payload = [ @@ -102,7 +104,7 @@ def test_task_create_several(self, p_put_tasks): self.assertEqual(len(task_payload), len(tasks)) self.assertEqual(1, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_task_create_parent(self, p_put_tasks): schedules = self.generate_schedules(1) task_payload = { @@ -120,17 +122,17 @@ def test_task_create_parent(self, p_put_tasks): self.assertEqual(tasks2[0]['parent_id'], tasks[0]['id']) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") - def test_task_create_nonexisting_parent(self, p_put_tasks): + @patch(PUT_TASK) + def test_task_create_nonexisting_parent(self, _p_put_tasks): schedules = self.generate_schedules(1) task_payload = { 'schedule_id': schedules[0].id, 'parent_id': str(uuid.uuid4()) } 
- code, tasks2 = self.client.tasks_create([task_payload]) + code, _ = self.client.tasks_create([task_payload]) self.assertEqual(code, 404) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_task_create_nonexisting_schedule(self, p_put_tasks): task_payload = { 'schedule_id': str(uuid.uuid4()) @@ -139,8 +141,8 @@ def test_task_create_nonexisting_schedule(self, p_put_tasks): self.assertEqual(code, 404) self.assertEqual(0, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") - def test_task_create_completed(self, p_put_tasks): + @patch(PUT_TASK) + def test_task_create_completed(self, _p_put_tasks): schedules = self.generate_schedules(1) task_payload = { 'schedule_id': schedules[0].id, diff --git a/katara/katara_service/tests/unittests/test_controller_base.py b/katara/katara_service/tests/unittests/test_controller_base.py index 676e893b..a19586de 100644 --- a/katara/katara_service/tests/unittests/test_controller_base.py +++ b/katara/katara_service/tests/unittests/test_controller_base.py @@ -12,7 +12,7 @@ def __init__(self, *args, **kwargs): @property def db_session(self): - db = DBFactory(DBType.Test, None).db + db = DBFactory(DBType.TEST, None).db engine = db.engine if not self._db_session: self._db_session = BaseDB.session(engine)() @@ -20,9 +20,9 @@ def db_session(self): def setUp(self): super().setUp() - self.db = DBFactory(DBType.Test, None).db + self.db = DBFactory(DBType.TEST, None).db self.db.create_all() def tearDown(self): - DBFactory.clean_type(DBType.Test) + DBFactory.clean_type(DBType.TEST) super().tearDown() diff --git a/katara/katara_service/tests/unittests/test_schedule_api.py b/katara/katara_service/tests/unittests/test_schedule_api.py index c0215267..649f7dd4 100644 --- a/katara/katara_service/tests/unittests/test_schedule_api.py +++ b/katara/katara_service/tests/unittests/test_schedule_api.py @@ -4,8 +4,6 @@ class 
TestScheduleApi(TestBase): - def setUp(self): - super().setUp() def test_schedule_get(self): schedules = self.generate_schedules(1) @@ -14,8 +12,8 @@ def test_schedule_get(self): self.assertEqual(schedules[0].id, schedule['id']) def test_schedule_get_nonexisting(self): - id = str(uuid.uuid4()) - code, _ = self.client.schedule_get(id) + id_ = str(uuid.uuid4()) + code, _ = self.client.schedule_get(id_) self.assertEqual(code, 404) def test_schedule_delete(self): @@ -95,7 +93,7 @@ def test_schedule_list_nonexisting_recipient(self): report = self.generate_reports(1)[0] recipient = self.generate_recipients(1)[0] crontab = '*/2 * * * *' - _, schedule = self.client.schedule_create( + self.client.schedule_create( crontab=crontab, recipient_id=recipient.id, report_id=report.id) code, api_recipients = self.client.schedule_list( recipient_id=str(uuid.uuid4())) @@ -109,10 +107,12 @@ def test_schedule_list_by_report(self): schedules = [] for i in range(2): _, schedule = self.client.schedule_create( - crontab=crontab, recipient_id=recipients[i].id, report_id=reports[i].id) + crontab=crontab, recipient_id=recipients[i].id, + report_id=reports[i].id) schedules.append(schedule) code, api_recipients = self.client.schedule_list( recipient_id=recipients[1].id, report_id=reports[1].id) self.assertEqual(code, 200) self.assertEqual(len(api_recipients['schedules']), 1) - self.assertEqual(api_recipients['schedules'][0]['id'], schedules[1]['id']) + self.assertEqual(api_recipients['schedules'][0]['id'], + schedules[1]['id']) diff --git a/katara/katara_service/tests/unittests/test_scheduler.py b/katara/katara_service/tests/unittests/test_scheduler.py index 485c1a64..4e918e2c 100644 --- a/katara/katara_service/tests/unittests/test_scheduler.py +++ b/katara/katara_service/tests/unittests/test_scheduler.py @@ -1,16 +1,20 @@ -from freezegun import freeze_time +from datetime import datetime from unittest.mock import patch +from freezegun import freeze_time - -from 
katara.katara_service.models.models import * +from katara.katara_service.models.models import ( + Recipient, Report, ReportFormat, Schedule) from katara.katara_service.controllers.schedule import ScheduleController -from katara.katara_service.tests.unittests.test_controller_base import TestControllerBase +from katara.katara_service.tests.unittests.test_controller_base import ( + TestControllerBase +) +BULK_SIZE = "katara.katara_service.controllers.schedule.BULK_SIZE" +PUT_TASK = "katara.katara_service.controllers.schedule." \ + "ScheduleController.put_tasks" -class TestScheduler(TestControllerBase): - def setUp(self): - super().setUp() +class TestScheduler(TestControllerBase): def generate_schedules(self, schedules_count): reports = [] @@ -23,8 +27,8 @@ def generate_schedules(self, schedules_count): for i in range(schedules_count): report = Report( - module_name='some_module_%s' % i, - name='test report_%s' % i, + module_name=f'some_module_{i}', + name=f'test report_{i}', report_format=ReportFormat.html) self.db_session.add(report) reports.append(report) @@ -40,30 +44,30 @@ def generate_schedules(self, schedules_count): self.db_session.commit() return schedules - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_no_schedules(self, p_put_tasks): self.generate_schedules(0) controller = ScheduleController(db_session=self.db_session) controller.generate_tasks() self.assertEqual(0, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") + @patch(PUT_TASK) def test_below_bulk_schedules(self, p_put_tasks): self.generate_schedules(2) controller = ScheduleController(db_session=self.db_session) controller.generate_tasks() self.assertEqual(1, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") - @patch("katara.katara_service.controllers.schedule.BULK_SIZE", 4) + @patch(PUT_TASK) + @patch(BULK_SIZE, 4) def 
test_upper_bulk_schedules_1(self, p_put_tasks): self.generate_schedules(5) controller = ScheduleController(db_session=self.db_session) controller.generate_tasks() self.assertEqual(2, p_put_tasks.call_count) - @patch("katara.katara_service.controllers.schedule.ScheduleController.put_tasks") - @patch("katara.katara_service.controllers.schedule.BULK_SIZE", 2) + @patch(PUT_TASK) + @patch(BULK_SIZE, 2) def test_upper_bulk_schedules_2(self, p_put_tasks): self.generate_schedules(51) controller = ScheduleController(db_session=self.db_session) diff --git a/katara/katara_service/urls.py b/katara/katara_service/urls.py index 9d671ead..533e8bdb 100644 --- a/katara/katara_service/urls.py +++ b/katara/katara_service/urls.py @@ -2,13 +2,13 @@ class Urls: url_prefix = '/katara' urls_map = { - 'tasks': r"%s/tasks/(?P[^/]+)", + 'tasks': r"%s/tasks/(?P[^/]+)", 'tasks_collection': r"%s/tasks", - 'recipients': r"%s/recipients/(?P[^/]+)", + 'recipients': r"%s/recipients/(?P[^/]+)", 'recipients_collection': r"%s/recipients", - 'reports': r"%s/reports/(?P[^/]+)", + 'reports': r"%s/reports/(?P[^/]+)", 'reports_collection': r"%s/reports", - 'schedules': r"%s/schedules/(?P[^/]+)", + 'schedules': r"%s/schedules/(?P[^/]+)", 'schedules_collection': r"%s/schedules", 'swagger': r'%s/swagger/(.*)' } diff --git a/katara/katara_service/write_spec.py b/katara/katara_service/write_spec.py index 67dbce54..df27dd63 100644 --- a/katara/katara_service/write_spec.py +++ b/katara/katara_service/write_spec.py @@ -1,9 +1,7 @@ -import yaml -import os.path import re -import json +import os.path +import yaml from apispec import APISpec, utils -from tornado.template import Template import katara.katara_service.main as server @@ -57,7 +55,8 @@ def main(): print("Warning: docstrings for '" + urlspec[0] + "' are not found") # Api spec file - with open(os.path.join(server.SWAGGER_PATH, "spec.yaml"), "w") as file: + with open(os.path.join(server.SWAGGER_PATH, "spec.yaml"), "w", + encoding='utf-8') as file: 
file.write(spec.to_yaml()) diff --git a/katara/katara_worker/main.py b/katara/katara_worker/main.py index 1d170bb2..d29ec12a 100644 --- a/katara/katara_worker/main.py +++ b/katara/katara_worker/main.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import os import requests -from requests.packages.urllib3.exceptions import InsecureRequestWarning +import urllib3 from kombu import Connection, Exchange, Queue from kombu.log import get_logger from kombu.mixins import ConsumerProducerMixin @@ -51,13 +51,13 @@ def get_consumers(self, Consumer, channel): callbacks=[self.process_task])] def put_herald_task(self, task_params): - task_exchange = Exchange(type='direct') + exchange = Exchange(type='direct') with producers[self.connection].acquire(block=True) as producer: producer.publish( task_params, serializer='json', - exchange=task_exchange, - declare=[task_exchange], + exchange=exchange, + declare=[exchange], routing_key=self.herald_routing_key, retry=True ) @@ -77,7 +77,8 @@ def process_task(self, body, message): _, katara_task = self.katara_cl.task_get(body['task_id']) task = TASKS_TRANSITIONS[katara_task['state']] except Exception as ex: - LOG.exception('Failed to get task %s: %s', body['task_id'], str(ex)) + LOG.exception('Failed to get task %s: %s', + body['task_id'], str(ex)) message.ack() return @@ -91,7 +92,8 @@ def process_task(self, body, message): if __name__ == '__main__': - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + urllib3.disable_warnings( + category=urllib3.exceptions.InsecureRequestWarning) setup_logging(loglevel='INFO', loggers=['']) config_cl = ConfigClient( @@ -99,8 +101,9 @@ def process_task(self, body, message): port=int(os.environ.get('HX_ETCD_PORT', DEFAULT_ETCD_PORT)), ) config_cl.wait_configured() - conn_str = 'amqp://{user}:{pass}@{host}:{port}'.format( - **config_cl.read_branch('/rabbit')) + params = config_cl.read_branch('/rabbit') + conn_str = f'amqp://{params["user"]}:{params["pass"]}@' \ + 
f'{params["host"]}:{params["port"]}' with Connection(conn_str) as conn: try: worker = Worker(conn, config_cl) diff --git a/katara/katara_worker/reports_generators/organization_expenses.py b/katara/katara_worker/reports_generators/organization_expenses.py index 1c42ca88..5e030dcd 100644 --- a/katara/katara_worker/reports_generators/organization_expenses.py +++ b/katara/katara_worker/reports_generators/organization_expenses.py @@ -52,10 +52,8 @@ def generate(self): self.organization_id, start, end, {'pool_id': self.get_nil_uuid()}) - st_dt_string = '{day}/{month}/{year}'.format( - day=start_date.day, month=start_date.month, year=start_date.year) - e_dt_string = '{day}/{month}/{year}'.format( - day=today.day, month=today.month, year=today.year) + st_dt_string = f'{start_date.day}/{start_date.month}/{start_date.year}' + e_dt_string = f'{today.day}/{today.month}/{today.year}' return { 'email': [self.report_data['user_email']], 'template_type': 'weekly_expense_report', @@ -81,7 +79,8 @@ def generate(self): 'total_cost': round(organization_total_cost, 2), 'forecast': round(organization_forecast, 2), 'id': self.organization_id, - 'currency_code': self.get_currency_code(org['currency']) + 'currency_code': self.get_currency_code( + org['currency']) } } } @@ -97,4 +96,5 @@ def get_monthly_forecast(cost): def main(organization_id, report_data, config_client): - return OrganizationExpenses(organization_id, report_data, config_client).generate() + return OrganizationExpenses( + organization_id, report_data, config_client).generate() diff --git a/katara/katara_worker/reports_generators/pool_limit_exceed.py b/katara/katara_worker/reports_generators/pool_limit_exceed.py index b0983921..60a47f78 100644 --- a/katara/katara_worker/reports_generators/pool_limit_exceed.py +++ b/katara/katara_worker/reports_generators/pool_limit_exceed.py @@ -35,13 +35,15 @@ def generate(self): return { 'email': [self.report_data['user_email']], 'template_type': 'pool_exceed_report', - 'subject': 'Action 
Required: Hystax OptScale Pool Limit Exceed Alert', + 'subject': 'Action Required: Hystax OptScale Pool Limit ' + 'Exceed Alert', 'template_params': { 'texts': { 'organization': { 'id': organization['id'], 'name': organization['name'], - 'currency_code': self.get_currency_code(organization['currency']) + 'currency_code': self.get_currency_code( + organization['currency']) }, 'user': self.report_data, 'exceeded': exceeded, diff --git a/katara/katara_worker/reports_generators/pool_limit_exceed_resources.py b/katara/katara_worker/reports_generators/pool_limit_exceed_resources.py index c2b2082a..1b9c856b 100644 --- a/katara/katara_worker/reports_generators/pool_limit_exceed_resources.py +++ b/katara/katara_worker/reports_generators/pool_limit_exceed_resources.py @@ -52,12 +52,14 @@ def generate(self): return { 'email': [self.report_data['user_email']], 'template_type': 'pool_exceed_resources_report', - 'subject': 'Action Required: Hystax OptScale Pool Limit Exceed Alert', + 'subject': 'Action Required: Hystax OptScale Pool Limit ' + 'Exceed Alert', 'template_params': { 'texts': { 'organization': { 'name': organization['name'], - 'currency_code': self.get_currency_code(organization['currency']) + 'currency_code': self.get_currency_code( + organization['currency']) }, 'user': self.report_data, 'exceeded_pools': exceeded_pools, diff --git a/katara/katara_worker/reports_generators/report.py b/katara/katara_worker/reports_generators/report.py index b427f2ea..3e6d3228 100644 --- a/katara/katara_worker/reports_generators/report.py +++ b/katara/katara_worker/reports_generators/report.py @@ -12,7 +12,7 @@ def create_report(module_name, organization_id, report_data, config_client): pythonpath = environ['PYTHONPATH'].split(pathsep)[0] import_base = dirname(__file__)[len(pythonpath):] import_base = import_base.replace('/', '.') - module = __import__('%s.%s' % (import_base, module_name), + module = __import__(f'{import_base}.{module_name}', globals(), locals(), modules, 0) # main 
function is entrypoint so each module MUST have it return module.main( diff --git a/katara/katara_worker/run_test.sh b/katara/katara_worker/run_test.sh index 2305e0b5..c2f3f4f5 100755 --- a/katara/katara_worker/run_test.sh +++ b/katara/katara_worker/run_test.sh @@ -6,13 +6,14 @@ TEST_IMAGE=katara_worker_tests:${BUILD_TAG} docker build -t ${TEST_IMAGE} --build-arg BUILDTAG=${BUILD_TAG} -f katara/katara_worker/Dockerfile_tests . -echo "PEP8 tests>>>" +echo "Pycodestyle tests>>>" docker run -i --rm ${TEST_IMAGE} \ - bash -c "pep8 --max-line-length=120 --ignore=E701 ." -echo "<<>>" -docker run -i --rm ${TEST_IMAGE} bash -c "cd katara/katara_worker && ls && pylint --rcfile=.pylintrc ./" +docker run -i --rm ${TEST_IMAGE} \ + bash -c "pylint --rcfile=katara/katara_worker/.pylintrc --fail-under=9 --fail-on=E,C,F ./katara" echo "Nose tests>>>" docker run -i --rm ${TEST_IMAGE} \ diff --git a/katara/katara_worker/tasks.py b/katara/katara_worker/tasks.py index f63e25d9..e98f4dd1 100644 --- a/katara/katara_worker/tasks.py +++ b/katara/katara_worker/tasks.py @@ -70,8 +70,7 @@ def s3_client(self): s3_params = self.config_cl.read_branch('/minio') self._s3_client = boto3.client( 's3', - endpoint_url='http://{}:{}'.format( - s3_params['host'], s3_params['port']), + endpoint_url=f"http://{s3_params['host']}:{s3_params['port']}", aws_access_key_id=s3_params['access'], aws_secret_access_key=s3_params['secret'], config=BotoConfig(s3={'addressing_style': 'path'}) @@ -303,10 +302,8 @@ def execute(self): on_continue_cb=self.on_continue_cb, on_complete_cb=self.on_complete_cb).execute() return - report_name = 'task_%s_%s' % ( - self.body['task_id'], - int(datetime.datetime.utcnow().timestamp())) - with open(report_name, 'w') as outfile: + report_name = f"task_{self.body['task_id']}_{self.body['task_id']}" + with open(report_name, 'w', encoding='utf-8') as outfile: json.dump(report_data, outfile) try: with open(report_name, 'rb') as f_report: diff --git 
a/katara/katara_worker/test-requirements.txt b/katara/katara_worker/test-requirements.txt index c75a21f2..1eed8fbc 100644 --- a/katara/katara_worker/test-requirements.txt +++ b/katara/katara_worker/test-requirements.txt @@ -1,7 +1,7 @@ coverage==4.2 nose==1.3.7 -pep8==1.7.1 -pylint==2.6.0 +pycodestyle==2.11.1 +pylint==3.0.2 freezegun==0.3.8 concurrencytest==0.1.2 diff --git a/rest_api/rest_api_server/controllers/auth_hierarchy.py b/rest_api/rest_api_server/controllers/auth_hierarchy.py index 13c7f1d8..8b58d09c 100644 --- a/rest_api/rest_api_server/controllers/auth_hierarchy.py +++ b/rest_api/rest_api_server/controllers/auth_hierarchy.py @@ -78,6 +78,9 @@ def auth_hierarchy(self, type=None, scope_id=None): } return result_scope.get(type)(type) + def on_finish(self): + pass + class AuthHierarchyAsyncController(BaseAsyncControllerWrapper): def _get_controller_class(self): diff --git a/rest_api/rest_api_server/controllers/base.py b/rest_api/rest_api_server/controllers/base.py index 27693738..be6c8246 100644 --- a/rest_api/rest_api_server/controllers/base.py +++ b/rest_api/rest_api_server/controllers/base.py @@ -328,6 +328,12 @@ def model_column_list(self): return list(map(lambda x: str(x.name), self.model_type.__table__.columns)) + def on_finish(self): + if getattr(self, '_clickhouse_client', None) is not None: + self._clickhouse_client.disconnect() + if getattr(self, '_mongo_client', None) is not None: + self._mongo_client.close() + def _get_model_type(self): raise NotImplementedError diff --git a/rest_api/rest_api_server/controllers/cloud_account.py b/rest_api/rest_api_server/controllers/cloud_account.py index fcc5bca2..780c0c04 100644 --- a/rest_api/rest_api_server/controllers/cloud_account.py +++ b/rest_api/rest_api_server/controllers/cloud_account.py @@ -1,4 +1,3 @@ -import json import logging import re from datetime import datetime, timedelta @@ -53,7 +52,7 @@ check_bool_attribute, check_dict_attribute, check_float_attribute, check_int_attribute, check_string, 
check_string_attribute, raise_invalid_argument_exception, raise_not_provided_exception, - CURRENCY_MAP, encode_config, decode_config, is_valid_meta) + CURRENCY_MAP, encode_config, decode_config) LOG = logging.getLogger(__name__) diff --git a/rest_api/rest_api_server/controllers/context.py b/rest_api/rest_api_server/controllers/context.py index 9863e6f2..cbf618c7 100644 --- a/rest_api/rest_api_server/controllers/context.py +++ b/rest_api/rest_api_server/controllers/context.py @@ -210,6 +210,10 @@ def get_info_for_resource(self, resource): Pool.id == resource['pool_id']) return type_name, query.one_or_none() + def on_finish(self): + if self._mongo_client is not None: + self._mongo_client.close() + class ContextAsyncController(BaseAsyncControllerWrapper): diff --git a/rest_api/rest_api_server/controllers/discovery_info.py b/rest_api/rest_api_server/controllers/discovery_info.py index 33f08440..707a4183 100644 --- a/rest_api/rest_api_server/controllers/discovery_info.py +++ b/rest_api/rest_api_server/controllers/discovery_info.py @@ -1,6 +1,6 @@ import logging -from sqlalchemy.sql import and_, exists +from sqlalchemy.sql import and_ from tools.cloud_adapter.model import ResourceTypes, RES_MODEL_MAP from rest_api.rest_api_server.controllers.base import BaseController diff --git a/rest_api/rest_api_server/controllers/discovery_info_bulk.py b/rest_api/rest_api_server/controllers/discovery_info_bulk.py index 97e9426d..70a6825f 100644 --- a/rest_api/rest_api_server/controllers/discovery_info_bulk.py +++ b/rest_api/rest_api_server/controllers/discovery_info_bulk.py @@ -6,7 +6,6 @@ from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.models.models import DiscoveryInfo -from rest_api.rest_api_server.utils import check_list_attribute from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import and_, exists diff --git 
a/rest_api/rest_api_server/controllers/invite.py b/rest_api/rest_api_server/controllers/invite.py index a99f173d..75bcd051 100644 --- a/rest_api/rest_api_server/controllers/invite.py +++ b/rest_api/rest_api_server/controllers/invite.py @@ -1,10 +1,7 @@ import datetime -import hashlib import json import logging -import random import requests -import string import uuid from etcd import EtcdKeyNotFound diff --git a/rest_api/rest_api_server/controllers/limit_hit.py b/rest_api/rest_api_server/controllers/limit_hit.py index c1f642ff..c97542c8 100644 --- a/rest_api/rest_api_server/controllers/limit_hit.py +++ b/rest_api/rest_api_server/controllers/limit_hit.py @@ -1,6 +1,4 @@ -from datetime import datetime, timedelta -from kombu import Connection as QConnection, Exchange -from kombu.pools import producers +from datetime import datetime import logging from sqlalchemy import and_ @@ -12,7 +10,6 @@ from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper from rest_api.rest_api_server.controllers.pool_policy import PoolPolicyController from rest_api.rest_api_server.controllers.cloud_resource import CloudResourceController -from rest_api.rest_api_server.controllers.expense import ExpenseController from rest_api.rest_api_server.controllers.resource_constraint import ( ResourceConstraintController) from rest_api.rest_api_server.controllers.pool_alert import PoolAlertController @@ -21,7 +18,7 @@ ThresholdBasedTypes) from rest_api.rest_api_server.models.models import ( ConstraintLimitHit, Pool, Employee, Organization, - CloudAccount, RolePurposes) + CloudAccount) from tools.optscale_exceptions.common_exc import NotFoundException diff --git a/rest_api/rest_api_server/controllers/live_demo.py b/rest_api/rest_api_server/controllers/live_demo.py index d7d6e643..550728c6 100644 --- a/rest_api/rest_api_server/controllers/live_demo.py +++ b/rest_api/rest_api_server/controllers/live_demo.py @@ -14,12 +14,11 @@ from optscale_client.config_client.client import 
etcd from datetime import datetime, timedelta from sqlalchemy import and_, true -from clickhouse_driver import Client as ClickHouseClient from tools.cloud_adapter.model import ResourceTypes from tools.optscale_exceptions.common_exc import InternalServerError from rest_api.rest_api_server.controllers.base import ( - BaseController, MongoMixin, BaseProfilingTokenController) + BaseController, MongoMixin, BaseProfilingTokenController, ClickHouseMixin) from rest_api.rest_api_server.controllers.base_async import ( BaseAsyncControllerWrapper) from rest_api.rest_api_server.controllers.register import RegisterController @@ -112,10 +111,9 @@ def rest_objects(cls): if name not in {'AuthUsers', 'PoolRelations'}) -class LiveDemoController(BaseController, MongoMixin): +class LiveDemoController(BaseController, MongoMixin, ClickHouseMixin): def __init__(self, db_session, config=None, token=None, engine=None): super().__init__(db_session, config, token, engine) - self._clickhouse_cl = None self._recovery_map = {} self._build_obj_map = { ObjectGroups.CloudAccounts: self.build_cloud_account, @@ -179,13 +177,13 @@ def __init__(self, db_session, config=None, token=None, engine=None): ObjectGroups.Runners: self.runners_collection } - self._third_party_objects = [ - ObjectGroups.Metrics, - ObjectGroups.K8sMetrics, - ObjectGroups.CleanExpenses, - ObjectGroups.TrafficExpenses, - ObjectGroups.RiSpUsages - ] + self._clickhouse_table_map = { + ObjectGroups.Metrics: 'average_metrics', + ObjectGroups.K8sMetrics: 'k8s_metrics', + ObjectGroups.CleanExpenses: 'expenses', + ObjectGroups.TrafficExpenses: 'traffic_expenses', + ObjectGroups.RiSpUsages: 'ri_sp_usage' + } self._key_object_group_map = { 'pool_id': ObjectGroups.Pools.value, 'employee_id': ObjectGroups.Employees.value, @@ -269,14 +267,6 @@ def runsets_collection(self): def runners_collection(self): return self.mongo_client.bulldozer.runner - @property - def clickhouse_cl(self): - if not self._clickhouse_cl: - user, password, host, db_name 
= self._config.clickhouse_params() - self._clickhouse_cl = ClickHouseClient( - host=host, password=password, database=db_name, user=user) - return self._clickhouse_cl - def _get_demo_multiplier(self): try: multiplier = int( @@ -626,10 +616,8 @@ def build_resource(self, obj, objects_group, now, organization_id, **kwargs): obj['total_cost'] = obj.get('total_cost', 0) * self.multiplier return obj - def build_raw_expense(self, obj, objects_group, now, **kwargs): - obj_id = ObjectId() - self._recovery_map[objects_group.value][obj.pop('_id')] = obj_id - obj['_id'] = obj_id + def build_raw_expense(self, obj, now, **kwargs): + obj['_id'] = ObjectId() obj['cost'] = obj['cost'] * self.multiplier obj = self.offsets_to_datetimes(['end_date', 'start_date'], now, obj) obj = self.refresh_relations(['cloud_account_id'], obj) @@ -834,8 +822,8 @@ def build_organization_constraint(self, obj, objects_group, now, self._org_constraint_type_map[new_id] = obj['type'] return OrganizationConstraint(**obj) - def build_organization_limit_hit(self, obj, objects_group, now, - organization_id, **kwargs): + def build_organization_limit_hit(self, obj, now, organization_id, + **kwargs): obj = self.offsets_to_timestamps(['created_at'], now, obj) obj = self.dict_key_offsets_to_timestamps( ['run_result.breakdown'], now, obj) @@ -1104,15 +1092,8 @@ def fill_organization( else: obj_ids = dest.insert_many(bulk).inserted_ids insertions_map[group].extend(obj_ids) - elif res and group in self._third_party_objects: - obj_clickhouse_table_map = { - ObjectGroups.Metrics: 'average_metrics', - ObjectGroups.K8sMetrics: 'k8s_metrics', - ObjectGroups.CleanExpenses: 'expenses', - ObjectGroups.TrafficExpenses: 'traffic_expenses', - ObjectGroups.RiSpUsages: 'ri_sp_usage' - } - table = obj_clickhouse_table_map.get(group) + elif res and group in self._clickhouse_table_map: + table = self._clickhouse_table_map.get(group) if not table: continue for i in range(0, len(res), CLICKHOUSE_BULK_SIZE): @@ -1144,14 +1125,14 @@ def 
fill_organization( def _insert_clickhouse(self, table, bulk): db = CLICKHOUSE_TABLE_DB_MAP[table] - return self.clickhouse_cl.execute( + return self.clickhouse_client.execute( f'INSERT INTO {db}.{table} VALUES', bulk) def delete_clickhouse_info(self, cloud_accounts): cloud_account_ids = list(map(lambda x: x.id, cloud_accounts)) for table in CLICKHOUSE_TABLE_DB_MAP: db = CLICKHOUSE_TABLE_DB_MAP[table] - self.clickhouse_cl.execute( + self.clickhouse_client.execute( f'ALTER TABLE {db}.{table} DELETE ' f'WHERE cloud_account_id in {cloud_account_ids}') diff --git a/rest_api/rest_api_server/controllers/pool.py b/rest_api/rest_api_server/controllers/pool.py index b88d4669..e4d7176f 100644 --- a/rest_api/rest_api_server/controllers/pool.py +++ b/rest_api/rest_api_server/controllers/pool.py @@ -16,7 +16,7 @@ from rest_api.rest_api_server.models.models import ( Pool, Organization, Employee, Checklist, CloudAccount, Rule, AssignmentRequest, PoolAlert, InviteAssignment, PoolPolicy, - PoolExpensesExport, OrganizationConstraint, OrganizationLimitHit) + PoolExpensesExport) from rest_api.rest_api_server.controllers.base import ( BaseController, MongoMixin, BaseHierarchicalController) from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper diff --git a/rest_api/rest_api_server/controllers/pool_alert.py b/rest_api/rest_api_server/controllers/pool_alert.py index 6699f6cc..11681278 100644 --- a/rest_api/rest_api_server/controllers/pool_alert.py +++ b/rest_api/rest_api_server/controllers/pool_alert.py @@ -2,19 +2,15 @@ import uuid import datetime -import requests from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import and_ -from optscale_client.herald_client.client_v2 import Client as HeraldClient -from optscale_client.auth_client.client_v2 import Client as AuthClient from tools.optscale_exceptions.common_exc import (NotFoundException, - WrongArgumentsException, - HeraldException) + WrongArgumentsException) from 
rest_api.rest_api_server.controllers.pool import PoolController from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.models.enums import ThresholdTypes, ThresholdBasedTypes from rest_api.rest_api_server.models.models import (PoolAlert, Pool, AlertContact, - Organization, Employee) + Organization) from rest_api.rest_api_server.controllers.base import (BaseController, BaseHierarchicalController) from rest_api.rest_api_server.controllers.employee import EmployeeController diff --git a/rest_api/rest_api_server/controllers/pool_expense.py b/rest_api/rest_api_server/controllers/pool_expense.py index 83d71fd1..7ed58e8b 100644 --- a/rest_api/rest_api_server/controllers/pool_expense.py +++ b/rest_api/rest_api_server/controllers/pool_expense.py @@ -9,7 +9,7 @@ from rest_api.rest_api_server.controllers.base import (BaseController, BaseHierarchicalController) from rest_api.rest_api_server.exceptions import Err -from rest_api.rest_api_server.models.models import Organization, CloudAccount, Pool +from rest_api.rest_api_server.models.models import Organization, Pool from rest_api.rest_api_server.utils import get_nil_uuid diff --git a/rest_api/rest_api_server/controllers/pool_policy.py b/rest_api/rest_api_server/controllers/pool_policy.py index 34e27ff8..721ecaa3 100644 --- a/rest_api/rest_api_server/controllers/pool_policy.py +++ b/rest_api/rest_api_server/controllers/pool_policy.py @@ -7,7 +7,7 @@ from tools.optscale_exceptions.common_exc import ConflictException from rest_api.rest_api_server.models.enums import ConstraintTypes from rest_api.rest_api_server.models.models import ( - PoolPolicy, Pool, ResourceConstraint, Organization) + PoolPolicy, Pool, ResourceConstraint) from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper from rest_api.rest_api_server.controllers.constraint_base import ConstraintBaseController from rest_api.rest_api_server.utils import check_int_attribute diff --git 
a/rest_api/rest_api_server/controllers/relevant_flavor.py b/rest_api/rest_api_server/controllers/relevant_flavor.py index c906cfb3..76b9c111 100644 --- a/rest_api/rest_api_server/controllers/relevant_flavor.py +++ b/rest_api/rest_api_server/controllers/relevant_flavor.py @@ -2,9 +2,8 @@ import requests import operator from collections import defaultdict -from tools.optscale_exceptions.common_exc import (NotFoundException, - WrongArgumentsException) -from rest_api.rest_api_server.controllers.base import BaseController, MongoMixin +from tools.optscale_exceptions.common_exc import (NotFoundException) +from rest_api.rest_api_server.controllers.base import BaseController from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper from concurrent.futures import ThreadPoolExecutor from optscale_client.insider_client.client import Client as InsiderClient diff --git a/rest_api/rest_api_server/controllers/report_import.py b/rest_api/rest_api_server/controllers/report_import.py index 178be768..a0fbe5bf 100644 --- a/rest_api/rest_api_server/controllers/report_import.py +++ b/rest_api/rest_api_server/controllers/report_import.py @@ -1,4 +1,3 @@ -import json import logging import uuid from sqlalchemy import and_, true, or_, exists diff --git a/rest_api/rest_api_server/controllers/resource.py b/rest_api/rest_api_server/controllers/resource.py index de0ac704..5dc65b78 100644 --- a/rest_api/rest_api_server/controllers/resource.py +++ b/rest_api/rest_api_server/controllers/resource.py @@ -62,6 +62,9 @@ def resources_get(self, **kwargs): str(ex)) return {} + def on_finish(self): + pass + class ResourceAsyncController(BaseAsyncControllerWrapper): def _get_controller_class(self): diff --git a/rest_api/rest_api_server/controllers/resource_constraint.py b/rest_api/rest_api_server/controllers/resource_constraint.py index 2383d266..34a26585 100644 --- a/rest_api/rest_api_server/controllers/resource_constraint.py +++ 
b/rest_api/rest_api_server/controllers/resource_constraint.py @@ -1,6 +1,5 @@ import logging -from datetime import datetime, time, timedelta -from collections import defaultdict +from datetime import datetime from sqlalchemy.sql import and_ from rest_api.rest_api_server.exceptions import Err diff --git a/rest_api/rest_api_server/controllers/rule_apply.py b/rest_api/rest_api_server/controllers/rule_apply.py index d20fed82..c0963518 100644 --- a/rest_api/rest_api_server/controllers/rule_apply.py +++ b/rest_api/rest_api_server/controllers/rule_apply.py @@ -2,7 +2,7 @@ import logging from copy import copy -from pymongo import UpdateOne, UpdateMany +from pymongo import UpdateOne from sqlalchemy import and_ from tools.optscale_exceptions.common_exc import NotFoundException diff --git a/rest_api/rest_api_server/controllers/shareable_resource_bulk.py b/rest_api/rest_api_server/controllers/shareable_resource_bulk.py index d1e93f68..50ce24f6 100644 --- a/rest_api/rest_api_server/controllers/shareable_resource_bulk.py +++ b/rest_api/rest_api_server/controllers/shareable_resource_bulk.py @@ -1,10 +1,9 @@ from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper from rest_api.rest_api_server.controllers.shareable_resource import ShareableBookingController -from rest_api.rest_api_server.controllers.base import MongoMixin from rest_api.rest_api_server.exceptions import Err -class ShareableResourceBulkController(ShareableBookingController, MongoMixin): +class ShareableResourceBulkController(ShareableBookingController): def _sharing_failed_response(self, not_shared_ids, invalid_ids=None, not_active_ids=None): diff --git a/rest_api/rest_api_server/controllers/webhook.py b/rest_api/rest_api_server/controllers/webhook.py index 805b0e33..c59ac840 100644 --- a/rest_api/rest_api_server/controllers/webhook.py +++ b/rest_api/rest_api_server/controllers/webhook.py @@ -3,7 +3,6 @@ from tools.optscale_exceptions.common_exc import NotFoundException, 
ConflictException from rest_api.rest_api_server.controllers.base import BaseController, MongoMixin from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper -from rest_api.rest_api_server.controllers.organization import OrganizationController from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.models.enums import WebhookObjectTypes, WebhookActionTypes from rest_api.rest_api_server.models.models import Webhook, Organization diff --git a/rest_api/rest_api_server/handlers/v1/base.py b/rest_api/rest_api_server/handlers/v1/base.py index 22a39782..f439cc3c 100644 --- a/rest_api/rest_api_server/handlers/v1/base.py +++ b/rest_api/rest_api_server/handlers/v1/base.py @@ -108,6 +108,9 @@ def set_content_type(self, def on_finish(self): self.session().close() + if self._controller is not None: + self._controller.on_finish() + super().on_finish() @property def controller(self): diff --git a/rest_api/rest_api_server/utils.py b/rest_api/rest_api_server/utils.py index b757e46c..3a1c4f95 100644 --- a/rest_api/rest_api_server/utils.py +++ b/rest_api/rest_api_server/utils.py @@ -40,8 +40,6 @@ BASE_POOL_EXPENSES_EXPORT_LINK_FORMAT = 'https://{0}/restapi/v2/pool_expenses_exports/{1}' tp_executor = ThreadPoolExecutor(30) tp_executor_context = ThreadPoolExecutor(30) -tp_executor_license = ThreadPoolExecutor(10) -tp_executor_bulk_cloud_calls = ThreadPoolExecutor(16) LOG = logging.getLogger(__name__) GB = 1024 * 1024 * 1024 SECONDS_IN_HOUR = 60 * 60