From c5f92c18cc05de7b5606e73b82ff8e0404e30652 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 1 Nov 2024 14:59:34 -0500 Subject: [PATCH] Revert "DRIVERS-3019 Add ruff linter and apply fixes" (#539) --- .evergreen/auth_aws/aws_tester.py | 13 +- .../lib/aws_assign_instance_profile.py | 8 +- .evergreen/auth_aws/lib/aws_assume_role.py | 3 +- .../auth_aws/lib/aws_assume_web_role.py | 3 +- .../auth_aws/lib/aws_handle_oidc_creds.py | 5 +- .../lib/aws_unassign_instance_profile.py | 7 +- .evergreen/auth_aws/lib/container_tester.py | 9 +- .evergreen/auth_aws/lib/util.py | 3 +- .evergreen/auth_oidc/azure/handle_secrets.py | 2 +- .../auth_oidc/azure/remote-scripts/test.py | 13 +- .../azure_func/self-test/function_app.py | 7 +- .../auth_oidc/gcp/remote-scripts/test.py | 8 +- .../auth_oidc/k8s/remote-scripts/test.py | 3 +- .evergreen/auth_oidc/oidc_get_tokens.py | 3 +- .../auth_oidc/oidc_write_orchestration.py | 6 +- .evergreen/auth_oidc/utils.py | 6 +- .evergreen/csfle/fake_azure.py | 10 +- .evergreen/csfle/gcpkms/mock_server.py | 16 +-- .evergreen/csfle/kms_failpoint_server.py | 45 +++---- .evergreen/csfle/kms_http_common.py | 3 +- .evergreen/csfle/kms_http_server.py | 22 ++-- .evergreen/csfle/kms_kmip_client.py | 8 +- .evergreen/csfle/kms_kmip_server.py | 8 +- .evergreen/csfle/setup_secrets.py | 12 +- .evergreen/docker/overwrite_orchestration.py | 2 +- .evergreen/generate_task_config.py | 1 + .evergreen/mongodl.py | 124 ++++++++++-------- .evergreen/mongosh-dl.py | 17 +-- .evergreen/ocsp/mock_ocsp_responder.py | 45 ++++--- .evergreen/ocsp/ocsp_mock.py | 3 +- .evergreen/secrets_handling/setup_secrets.py | 3 +- .evergreen/socks5srv.py | 18 +-- .github/workflows/tests.yml | 4 - .pre-commit-config.yaml | 7 - .../evergreen_config_generator/__init__.py | 4 +- .../evergreen_config_generator/tasks.py | 2 +- ruff.toml | 43 ------ 37 files changed, 241 insertions(+), 255 deletions(-) delete mode 100644 ruff.toml diff --git a/.evergreen/auth_aws/aws_tester.py 
b/.evergreen/auth_aws/aws_tester.py index a500b587..c0cbc1ce 100644 --- a/.evergreen/auth_aws/aws_tester.py +++ b/.evergreen/auth_aws/aws_tester.py @@ -1,16 +1,17 @@ +#!/usr/bin/env python3 """ Script for testing MONGDOB-AWS authentication. """ import argparse -import json import os -import subprocess +import json import sys +import subprocess from functools import partial -from urllib.parse import quote_plus from pymongo import MongoClient from pymongo.errors import OperationFailure +from urllib.parse import quote_plus HERE = os.path.abspath(os.path.dirname(__file__)) @@ -19,10 +20,10 @@ def join(*parts): sys.path.insert(0, join(HERE, 'lib')) -from aws_assign_instance_profile import _assign_instance_policy +from util import get_key as _get_key from aws_assume_role import _assume_role from aws_assume_web_role import _assume_role_with_web_identity -from util import get_key as _get_key +from aws_assign_instance_profile import _assign_instance_policy ASSUMED_ROLE = "arn:aws:sts::557821124784:assumed-role/authtest_user_assume_role/*" ASSUMED_WEB_ROLE = "arn:aws:sts::857654397073:assumed-role/webIdentityTestRole/*" @@ -43,7 +44,7 @@ def join(*parts): def run(args, env): """Run a python command in a subprocess.""" env.update(os.environ.copy()) - return subprocess.run([sys.executable, *args], env=env, check=False).returncode + return subprocess.run([sys.executable] + args, env=env).returncode def create_user(user, kwargs): diff --git a/.evergreen/auth_aws/lib/aws_assign_instance_profile.py b/.evergreen/auth_aws/lib/aws_assign_instance_profile.py index 305b9588..189c824b 100644 --- a/.evergreen/auth_aws/lib/aws_assign_instance_profile.py +++ b/.evergreen/auth_aws/lib/aws_assign_instance_profile.py @@ -1,18 +1,20 @@ +#!/usr/bin/env python3 """ Script for assign an instance policy to the current machine. 
""" import argparse -import json +import urllib.request import logging +import json import os import sys import time -import urllib.request from functools import partial import boto3 import botocore + from util import get_key as _get_key sys.path.insert(1, os.path.join(sys.path[0], '..')) @@ -39,7 +41,7 @@ def _has_instance_profile(): try: url = base_url + iam_role print("Reading: " + url) - urllib.request.urlopen(url) + req = urllib.request.urlopen(url) print("Assigned " + iam_role) except urllib.error.HTTPError as e: print(e) diff --git a/.evergreen/auth_aws/lib/aws_assume_role.py b/.evergreen/auth_aws/lib/aws_assume_role.py index 422eb20b..7a230338 100644 --- a/.evergreen/auth_aws/lib/aws_assume_role.py +++ b/.evergreen/auth_aws/lib/aws_assume_role.py @@ -1,10 +1,11 @@ +#!/usr/bin/env python3 """ Script for assuming an aws role. """ import argparse -import logging import uuid +import logging import boto3 diff --git a/.evergreen/auth_aws/lib/aws_assume_web_role.py b/.evergreen/auth_aws/lib/aws_assume_web_role.py index 9e79a1fd..de0a9c63 100644 --- a/.evergreen/auth_aws/lib/aws_assume_web_role.py +++ b/.evergreen/auth_aws/lib/aws_assume_web_role.py @@ -1,11 +1,12 @@ +#!/usr/bin/env python3 """ Script for assuming an aws role using AssumeRoleWithWebIdentity. """ import argparse -import logging import os import uuid +import logging import boto3 diff --git a/.evergreen/auth_aws/lib/aws_handle_oidc_creds.py b/.evergreen/auth_aws/lib/aws_handle_oidc_creds.py index d719f63b..bf277db6 100644 --- a/.evergreen/auth_aws/lib/aws_handle_oidc_creds.py +++ b/.evergreen/auth_aws/lib/aws_handle_oidc_creds.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ Script for handling OIDC credentials. """ @@ -14,6 +15,7 @@ from pyop.userinfo import Userinfo + class CustomSubjectIdentifierFactory(HashBasedSubjectIdentifierFactory): """ Implements a hash based algorithm for creating a pairwise subject identifier. 
@@ -31,7 +33,7 @@ def create_pairwise_identifier(self, user_id, sector_identifier): def get_default_config(): - return { + config = { "issuer": os.getenv('IDP_ISSUER', ''), "jwks_uri": os.getenv('IDP_JWKS_URI', ''), 'rsa_key': os.getenv('IDP_RSA_KEY', ''), @@ -40,6 +42,7 @@ def get_default_config(): 'username': os.getenv("IDP_USERNAME", 'test_user'), 'token_file': os.getenv('AWS_WEB_IDENTITY_TOKEN_FILE') } + return config def get_provider(config=None, expires=None): diff --git a/.evergreen/auth_aws/lib/aws_unassign_instance_profile.py b/.evergreen/auth_aws/lib/aws_unassign_instance_profile.py index c299bd10..02899e8f 100644 --- a/.evergreen/auth_aws/lib/aws_unassign_instance_profile.py +++ b/.evergreen/auth_aws/lib/aws_unassign_instance_profile.py @@ -1,13 +1,14 @@ +#!/usr/bin/env python3 """ Script for unassigning an instance policy from the current machine. """ import argparse +import urllib.error +import urllib.request import logging import sys import time -import urllib.error -import urllib.request import boto3 import botocore @@ -31,7 +32,7 @@ def _has_instance_profile(): try: url = base_url + iam_role print("Reading: " + url) - urllib.request.urlopen(url) + req = urllib.request.urlopen(url) except urllib.error.HTTPError as e: print(e) if e.code == 404: diff --git a/.evergreen/auth_aws/lib/container_tester.py b/.evergreen/auth_aws/lib/container_tester.py index 7500fc0e..ad259e94 100644 --- a/.evergreen/auth_aws/lib/container_tester.py +++ b/.evergreen/auth_aws/lib/container_tester.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ Script for testing mongodb in containers. 
@@ -40,7 +41,7 @@ def _run_process(params, cwd=None): LOGGER.info("RUNNING COMMAND: %s", params) - ret = subprocess.run(params, cwd=cwd, check=False) + ret = subprocess.run(params, cwd=cwd) return ret.returncode def _userandhostandport(endpoint): @@ -136,7 +137,7 @@ def remote_ps_container(cluster): assert private_ip_address eni = ec2_client.describe_network_interfaces(NetworkInterfaceIds=enis) - public_ip = next(iter(n["Association"]["PublicIp"] for n in eni["NetworkInterfaces"])) + public_ip = [n["Association"]["PublicIp"] for n in eni["NetworkInterfaces"]][0] for container in task['containers']: taskArn = container['taskArn'] @@ -145,7 +146,7 @@ def remote_ps_container(cluster): task_id = task_id + "/" + name lastStatus = container['lastStatus'] - print(f"{task_id:<43}{lastStatus:<9}{public_ip:<25}{private_ip_address:<25}{taskDefinition_short:<16}") + print("{:<43}{:<9}{:<25}{:<25}{:<16}".format(task_id, lastStatus, public_ip, private_ip_address, taskDefinition_short )) def _remote_create_container_args(args): remote_create_container(args.cluster, args.task_definition, args.service, args.subnets, args.security_group) @@ -246,7 +247,7 @@ def remote_get_public_endpoint_str(cluster, service_name): assert enis eni = ec2_client.describe_network_interfaces(NetworkInterfaceIds=enis) - public_ip = next(iter(n["Association"]["PublicIp"] for n in eni["NetworkInterfaces"])) + public_ip = [n["Association"]["PublicIp"] for n in eni["NetworkInterfaces"]][0] break return f"root@{public_ip}:22" diff --git a/.evergreen/auth_aws/lib/util.py b/.evergreen/auth_aws/lib/util.py index 2bbaf2e6..72c75616 100644 --- a/.evergreen/auth_aws/lib/util.py +++ b/.evergreen/auth_aws/lib/util.py @@ -1,4 +1,5 @@ def get_key(key: str, uppercase: bool) -> str: if uppercase: return key.upper() - return key + else: + return key diff --git a/.evergreen/auth_oidc/azure/handle_secrets.py b/.evergreen/auth_oidc/azure/handle_secrets.py index f8866baf..7651e480 100644 --- 
a/.evergreen/auth_oidc/azure/handle_secrets.py +++ b/.evergreen/auth_oidc/azure/handle_secrets.py @@ -2,8 +2,8 @@ import os from base64 import b64decode -from azure.identity import DefaultAzureCredential from azure.keyvault.secrets import SecretClient +from azure.identity import DefaultAzureCredential def main(): diff --git a/.evergreen/auth_oidc/azure/remote-scripts/test.py b/.evergreen/auth_oidc/azure/remote-scripts/test.py index ba7980f7..46644132 100644 --- a/.evergreen/auth_oidc/azure/remote-scripts/test.py +++ b/.evergreen/auth_oidc/azure/remote-scripts/test.py @@ -1,8 +1,7 @@ -import json -import os -from urllib.request import Request, urlopen - from pymongo import MongoClient +import os +import json +from urllib.request import urlopen, Request from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult app_id = os.environ['AZUREOIDC_APPID'] @@ -23,7 +22,7 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: body = response.read().decode('utf8') except Exception as e: msg = "Failed to acquire IMDS access token: %s" % e - raise ValueError(msg) from e + raise ValueError(msg) if status != 200: print(body) @@ -31,8 +30,8 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: raise ValueError(msg) try: data = json.loads(body) - except Exception as e: - raise ValueError("Azure IMDS response must be in JSON format.") from e + except Exception: + raise ValueError("Azure IMDS response must be in JSON format.") for key in ["access_token", "expires_in"]: if not data.get(key): diff --git a/.evergreen/auth_oidc/azure_func/self-test/function_app.py b/.evergreen/auth_oidc/azure_func/self-test/function_app.py index 0c836c20..dbe5ff21 100644 --- a/.evergreen/auth_oidc/azure_func/self-test/function_app.py +++ b/.evergreen/auth_oidc/azure_func/self-test/function_app.py @@ -1,9 +1,8 @@ -import json +import azure.functions as func import logging import os -from urllib.request import Request, urlopen - -import 
azure.functions as func +from urllib.request import urlopen, Request +import json from pymongo import MongoClient from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult diff --git a/.evergreen/auth_oidc/gcp/remote-scripts/test.py b/.evergreen/auth_oidc/gcp/remote-scripts/test.py index 293fa9af..c8fa9aaa 100644 --- a/.evergreen/auth_oidc/gcp/remote-scripts/test.py +++ b/.evergreen/auth_oidc/gcp/remote-scripts/test.py @@ -1,7 +1,7 @@ -import os -from urllib.request import Request, urlopen - from pymongo import MongoClient +import os +import json +from urllib.request import urlopen, Request from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult audience = os.environ['GCPOIDC_AUDIENCE'] @@ -20,7 +20,7 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: body = response.read().decode('utf8') except Exception as e: msg = "Failed to acquire IMDS access token: %s" % e - raise ValueError(msg) from e + raise ValueError(msg) if status != 200: print(body) diff --git a/.evergreen/auth_oidc/k8s/remote-scripts/test.py b/.evergreen/auth_oidc/k8s/remote-scripts/test.py index dd583b6d..06e429df 100644 --- a/.evergreen/auth_oidc/k8s/remote-scripts/test.py +++ b/.evergreen/auth_oidc/k8s/remote-scripts/test.py @@ -1,6 +1,5 @@ -import os - from pymongo import MongoClient +import os from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult atlas_uri = os.environ["MONGODB_URI"] diff --git a/.evergreen/auth_oidc/oidc_get_tokens.py b/.evergreen/auth_oidc/oidc_get_tokens.py index d02d07aa..6f1e4104 100644 --- a/.evergreen/auth_oidc/oidc_get_tokens.py +++ b/.evergreen/auth_oidc/oidc_get_tokens.py @@ -3,8 +3,7 @@ HERE = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, HERE) -from utils import DEFAULT_CLIENT, get_id_token, get_secrets, join - +from utils import get_secrets, get_id_token, DEFAULT_CLIENT, join TOKEN_DIR = os.environ['OIDC_TOKEN_DIR'].replace(os.sep, '/') def 
generate_tokens(config, base_name): diff --git a/.evergreen/auth_oidc/oidc_write_orchestration.py b/.evergreen/auth_oidc/oidc_write_orchestration.py index 16e66279..97abc9ac 100644 --- a/.evergreen/auth_oidc/oidc_write_orchestration.py +++ b/.evergreen/auth_oidc/oidc_write_orchestration.py @@ -1,13 +1,15 @@ +#!/usr/bin/env python3 """ Script for managing OIDC. """ -import json import os +import json import sys + HERE = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, HERE) -from utils import DEFAULT_CLIENT, get_secrets +from utils import get_secrets, MOCK_ENDPOINT, DEFAULT_CLIENT def azure(): diff --git a/.evergreen/auth_oidc/utils.py b/.evergreen/auth_oidc/utils.py index 5bcd3640..a3220b30 100644 --- a/.evergreen/auth_oidc/utils.py +++ b/.evergreen/auth_oidc/utils.py @@ -1,6 +1,9 @@ +import json import os import sys +import boto3 + HERE = os.path.abspath(os.path.dirname(__file__)) def join(*args): @@ -8,8 +11,7 @@ def join(*args): aws_lib = join(os.path.dirname(HERE), 'auth_aws', 'lib') sys.path.insert(0, aws_lib) -from aws_handle_oidc_creds import MOCK_ENDPOINT, get_id_token # noqa: F401 - +from aws_handle_oidc_creds import get_id_token, MOCK_ENDPOINT secrets_root = join(os.path.dirname(HERE), 'secrets_handling') sys.path.insert(0, secrets_root) from setup_secrets import get_secrets as root_get_secrets diff --git a/.evergreen/csfle/fake_azure.py b/.evergreen/csfle/fake_azure.py index 26f5953f..fec1c341 100644 --- a/.evergreen/csfle/fake_azure.py +++ b/.evergreen/csfle/fake_azure.py @@ -11,8 +11,7 @@ imds = Bottle(autojson=True) """An Azure IMDS server""" -from collections.abc import Iterable -from typing import TYPE_CHECKING, Any, Callable, cast, overload +from typing import TYPE_CHECKING, Any, Callable, Iterable, cast, overload if not TYPE_CHECKING: from bottle import request @@ -116,7 +115,7 @@ def get_oauth2_token(): if case == 'slow': return _slow() - assert case in (None, ''), f'Unknown HTTP test case "{case}"' + assert case in (None, ''), 
'Unknown HTTP test case "{}"'.format(case) return { 'access_token': 'magic-cookie', @@ -149,6 +148,7 @@ def _slow() -> Iterable[bytes]: if __name__ == '__main__': print( - f'RECOMMENDED: Run this script using bottle.py (e.g. [{sys.executable} {Path(__file__).resolve().parent}/bottle.py fake_azure:imds])' - ) + 'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_azure:imds])' + .format(sys.executable, + Path(__file__).resolve().parent)) imds.run() diff --git a/.evergreen/csfle/gcpkms/mock_server.py b/.evergreen/csfle/gcpkms/mock_server.py index d50bcc28..51071f6c 100644 --- a/.evergreen/csfle/gcpkms/mock_server.py +++ b/.evergreen/csfle/gcpkms/mock_server.py @@ -2,15 +2,14 @@ Mock a GCP Metadata Server. Returns a valid access_token. """ -import base64 import http.server -import json -import os -import textwrap import time - +import base64 +import json import jwt import requests +import textwrap +import os def b64_to_b64url(b64): @@ -20,7 +19,8 @@ def b64_to_b64url(b64): def dict_to_b64url(arg): as_json = json.dumps(arg).encode("utf8") as_b64 = base64.b64encode(as_json).decode("utf8") - return b64_to_b64url(as_b64) + as_b64url = b64_to_b64url(as_b64) + return as_b64url def get_access_token(): @@ -34,7 +34,7 @@ def get_access_token(): if "GOOGLE_APPLICATION_CREDENTIALS" not in os.environ: raise Exception( "please set GOOGLE_APPLICATION_CREDENTIALS environment variable to a JSON Service account key") - creds = json.load(open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"])) + creds = json.load(open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"], "r")) private_key = creds["private_key"].encode("utf8") client_email = creds["client_email"] @@ -82,7 +82,7 @@ def main(): global private_key port = 5000 server = http.server.HTTPServer(("localhost", port), Handler) - print (f"Listening on port {port}") + print ("Listening on port {}".format(port)) server.serve_forever() diff --git a/.evergreen/csfle/kms_failpoint_server.py 
b/.evergreen/csfle/kms_failpoint_server.py index 9122a88a..460040d5 100644 --- a/.evergreen/csfle/kms_failpoint_server.py +++ b/.evergreen/csfle/kms_failpoint_server.py @@ -19,12 +19,12 @@ """ import argparse -import base64 import http.server import json -import os -import ssl import urllib +import ssl +import base64 +import os from pathlib import PurePosixPath # A new instance of Handler is created for every request, so these have to be global variables @@ -64,7 +64,7 @@ def _send_json(self, data: dict): def _send_not_found(self): self.send_response(http.HTTPStatus.NOT_FOUND) - msg = b"Not found" + msg = "Not found".encode("utf8") self.send_header("Content-Type", "text/plain") self.send_header("Content-Length", len(msg)) self.end_headers() @@ -93,18 +93,18 @@ def do_POST(self): remaining_http_fails = data['count'] else: self._send_not_found() - return None - print(f"Enabling failpoint for type: {failpoint_type}") + return + print("Enabling failpoint for type: {}".format(failpoint_type)) self._send_json( - {"message": f"failpoint set for type: '{failpoint_type}'"} + {"message": "failpoint set for type: '{}'".format(failpoint_type)} ) - return None + return if path.match("/reset"): remaining_http_fails = 0 remaining_network_fails = 0 self._send_json({"message": "failpoints reset"}) - return None + return # If a failpoint was set, fail the request. 
if remaining_network_fails > 0: @@ -116,39 +116,40 @@ def do_POST(self): aws_op = self.headers['X-Amz-Target'] if aws_op == "TrentService.Encrypt": self._send_json({"CiphertextBlob": base64.b64encode(fake_ciphertext.encode()).decode()}) - return None - if aws_op == "TrentService.Decrypt": + return + elif aws_op == "TrentService.Decrypt": if remaining_http_fails > 0: self._http_fail() - return None + return self._send_json({"Plaintext": base64.b64encode(fake_plaintext.encode()).decode()}) - return None - self._send_not_found() - return None + return + else: + self._send_not_found() + return # GCP or Azure auth path: /c01df00d-cafe-g00d-dea1-decea5sedbeef/oauth2/v2.0/token if path.match("*token"): if remaining_http_fails > 0: self._http_fail() - return None + return return self._send_json({"access_token": "foo", "expires_in": 99999}) # GCP encrypt path: /v1/projects/{project}/locations/{location}/keyRings/{key-ring}/cryptoKeys/{key}:encrypt - if path.match("*encrypt"): + elif path.match("*encrypt"): return self._send_json({"ciphertext": base64.b64encode(fake_ciphertext.encode()).decode()}) # GCP decrypt path: /v1/projects/{project}/locations/{location}/keyRings/{key-ring}/cryptoKeys/{key}:decrypt - if path.match("*decrypt"): + elif path.match("*decrypt"): if remaining_http_fails > 0: self._http_fail() - return None + return return self._send_json({"plaintext": base64.b64encode(fake_plaintext.encode()).decode()}) # Azure decrypt path: /keys/{key-name}/{key-version}/unwrapkey - if path.match("*unwrapkey"): + elif path.match("*unwrapkey"): if remaining_http_fails > 0: self._http_fail() - return None + return return self._send_json({"value": base64.b64encode(fake_plaintext.encode()).decode()}) # Azure encrypt path: /keys/{key-name}/{key-version}/wrapkey - if path.match("*wrapkey"): + elif path.match("*wrapkey"): return self._send_json({"value": base64.b64encode(fake_ciphertext.encode()).decode()}) self._send_not_found() diff --git a/.evergreen/csfle/kms_http_common.py 
b/.evergreen/csfle/kms_http_common.py index d363e231..1dae48b5 100644 --- a/.evergreen/csfle/kms_http_common.py +++ b/.evergreen/csfle/kms_http_common.py @@ -71,11 +71,12 @@ def do_GET(self): else: self.send_response(http.HTTPStatus.NOT_FOUND) self.end_headers() - self.wfile.write(b"Unknown URL") + self.wfile.write("Unknown URL".encode()) @abstractmethod def do_POST(self): """Serve a POST request.""" + pass def _send_reply(self, data, status=http.HTTPStatus.OK): print("Sending Response: " + data.decode()) diff --git a/.evergreen/csfle/kms_http_server.py b/.evergreen/csfle/kms_http_server.py index 802914ea..9ab05a70 100644 --- a/.evergreen/csfle/kms_http_server.py +++ b/.evergreen/csfle/kms_http_server.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 """ Mock AWS KMS Endpoint. @@ -14,11 +15,12 @@ import sys import urllib.parse -import kms_http_common from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest from botocore.credentials import Credentials +import kms_http_common + SECRET_PREFIX = "00SECRET" # List of supported fault types @@ -54,7 +56,7 @@ def do_POST(self): else: self.send_response(http.HTTPStatus.NOT_FOUND) self.end_headers() - self.wfile.write(b"Unknown URL") + self.wfile.write("Unknown URL".encode()) def _do_post(self): c_len = int(self.headers.get('content-length')) @@ -63,7 +65,7 @@ def _do_post(self): print("RAW INPUT: " + str(raw_input)) - if self.headers["Host"] != "localhost": + if not self.headers["Host"] == "localhost": data = "Unexpected host" self._send_reply(data.encode("utf-8")) @@ -133,9 +135,9 @@ def _do_encrypt_faults(self, raw_ciphertext): kms_http_common.stats.fault_calls += 1 if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT: - self._send_reply(b"Internal Error of some sort.", http.HTTPStatus.INTERNAL_SERVER_ERROR) + self._send_reply("Internal Error of some sort.".encode(), http.HTTPStatus.INTERNAL_SERVER_ERROR) return - if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_WRONG_FIELDS: + elif 
kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_WRONG_FIELDS: response = { "SomeBlob" : raw_ciphertext, "KeyId" : "foo", @@ -143,7 +145,7 @@ def _do_encrypt_faults(self, raw_ciphertext): self._send_reply(json.dumps(response).encode('utf-8')) return - if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_BAD_BASE64: + elif kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_BAD_BASE64: response = { "CiphertextBlob" : "foo", "KeyId" : "foo", @@ -151,7 +153,7 @@ def _do_encrypt_faults(self, raw_ciphertext): self._send_reply(json.dumps(response).encode('utf-8')) return - if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_CORRECT_FORMAT: + elif kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_CORRECT_FORMAT: response = { "__type" : "NotFoundException", "Message" : "Error encrypting message", @@ -189,9 +191,9 @@ def _do_decrypt_faults(self, blob): kms_http_common.stats.fault_calls += 1 if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT: - self._send_reply(b"Internal Error of some sort.", http.HTTPStatus.INTERNAL_SERVER_ERROR) + self._send_reply("Internal Error of some sort.".encode(), http.HTTPStatus.INTERNAL_SERVER_ERROR) return - if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_WRONG_KEY: + elif kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_WRONG_KEY: response = { "Plaintext" : "ta7DXE7J0OiCRw03dYMJSeb8nVF5qxTmZ9zWmjuX4zW/SOorSCaY8VMTWG+cRInMx/rr/+QeVw2WjU2IpOSvMg==", "KeyId" : "Not a clue", @@ -199,7 +201,7 @@ def _do_decrypt_faults(self, blob): self._send_reply(json.dumps(response).encode('utf-8')) return - if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_CORRECT_FORMAT: + elif kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_CORRECT_FORMAT: response = { "__type" : "NotFoundException", "Message" : "Error decrypting message", diff --git a/.evergreen/csfle/kms_kmip_client.py b/.evergreen/csfle/kms_kmip_client.py index 770f6b8d..52c0032d 100644 --- 
a/.evergreen/csfle/kms_kmip_client.py +++ b/.evergreen/csfle/kms_kmip_client.py @@ -1,14 +1,14 @@ +#! /usr/bin/env python3 """ Ensures a SecretData is registered with the unique identifier "1" on the KMS KMIP test server. This is a utility, and not meant for CI testing. """ -import os - -import kmip.core.enums import kmip.pie.client -import kmip.pie.exceptions import kmip.pie.objects +import kmip.pie.exceptions +import kmip.core.enums +import os HOSTNAME = "localhost" PORT = 5698 diff --git a/.evergreen/csfle/kms_kmip_server.py b/.evergreen/csfle/kms_kmip_server.py index 1b995faa..335aa160 100644 --- a/.evergreen/csfle/kms_kmip_server.py +++ b/.evergreen/csfle/kms_kmip_server.py @@ -1,14 +1,14 @@ +#! /usr/bin/env python3 """ KMS KMIP test server. """ -import argparse -import logging +from kmip.services.server import KmipServer import os +import logging +import argparse import shutil -from kmip.services.server import KmipServer - HOSTNAME = "localhost" PORT = 5698 diff --git a/.evergreen/csfle/setup_secrets.py b/.evergreen/csfle/setup_secrets.py index c9e8cd2f..023ad5fb 100644 --- a/.evergreen/csfle/setup_secrets.py +++ b/.evergreen/csfle/setup_secrets.py @@ -1,8 +1,8 @@ +#!/usr/bin/env python3 """ Set up encryption secrets. 
""" import os - import boto3 os.environ['AWS_ACCESS_KEY_ID']=os.environ['FLE_AWS_KEY'] @@ -15,13 +15,13 @@ credentials = client.get_session_token()["Credentials"] with open('secrets-export.sh', 'ab') as fid: - fid.write(f'\nexport CSFLE_AWS_TEMP_ACCESS_KEY_ID="{credentials["AccessKeyId"]}"'.encode()) - fid.write(f'\nexport CSFLE_AWS_TEMP_SECRET_ACCESS_KEY="{credentials["SecretAccessKey"]}"'.encode()) - fid.write(f'\nexport CSFLE_AWS_TEMP_SESSION_TOKEN="{credentials["SessionToken"]}"'.encode()) + fid.write(f'\nexport CSFLE_AWS_TEMP_ACCESS_KEY_ID="{credentials["AccessKeyId"]}"'.encode('utf8')) + fid.write(f'\nexport CSFLE_AWS_TEMP_SECRET_ACCESS_KEY="{credentials["SecretAccessKey"]}"'.encode('utf8')) + fid.write(f'\nexport CSFLE_AWS_TEMP_SESSION_TOKEN="{credentials["SessionToken"]}"'.encode('utf8')) for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_DEFAULT_REGION', 'AWS_SESSION_TOKEN', 'CSFLE_TLS_CA_FILE', 'CSFLE_TLS_CERT_FILE', 'CSFLE_TLS_CLIENT_CERT_FILE']: - fid.write(f'\nexport {key}="{os.environ[key]}"'.encode()) - fid.write(b'\n') + fid.write(f'\nexport {key}="{os.environ[key]}"'.encode('utf8')) + fid.write('\n'.encode('utf8')) print("Getting CSFLE temp creds...done") diff --git a/.evergreen/docker/overwrite_orchestration.py b/.evergreen/docker/overwrite_orchestration.py index d79176c6..2d46b1c1 100644 --- a/.evergreen/docker/overwrite_orchestration.py +++ b/.evergreen/docker/overwrite_orchestration.py @@ -1,5 +1,5 @@ -import json import os +import json orch_file = os.environ['ORCHESTRATION_FILE'] diff --git a/.evergreen/generate_task_config.py b/.evergreen/generate_task_config.py index 8c45cec6..3820cc40 100644 --- a/.evergreen/generate_task_config.py +++ b/.evergreen/generate_task_config.py @@ -1,5 +1,6 @@ import itertools + TASK_TEMPLATE = ''' - name: "test-{version}-{topology}" tags: ["{version}", "{topology}"] diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py index 42743a39..28d91a59 100755 --- a/.evergreen/mongodl.py +++ 
b/.evergreen/mongodl.py @@ -29,19 +29,11 @@ import urllib.request import zipfile from collections import namedtuple -from collections.abc import Iterable, Iterator, Sequence from contextlib import contextmanager from fnmatch import fnmatch from pathlib import Path, PurePath, PurePosixPath -from typing import ( - IO, - TYPE_CHECKING, - Any, - Callable, - NamedTuple, - Optional, - cast, -) +from typing import (IO, TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, + NamedTuple, Sequence, cast) # These versions are used for performance benchmarking. Do not update to a newer version. PERF_VERSIONS = { @@ -165,7 +157,8 @@ def infer_target_from_os_release(osr: Path) -> str: # Extract the "ID" field id_re = re.compile(r'\bID=("?)(.*)\1') mat = id_re.search(os_rel) - assert mat, f'Unable to detect ID from [{osr}] content:\n{os_rel}' + assert mat, 'Unable to detect ID from [{}] content:\n{}'.format( + osr, os_rel) os_id = mat.group(2) if os_id == 'arch': # There are no Archlinux-specific MongoDB downloads, so we'll just use @@ -175,7 +168,8 @@ def infer_target_from_os_release(osr: Path) -> str: # Extract the "VERSION_ID" field ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1') mat = ver_id_re.search(os_rel) - assert mat, f'Unable to detect VERSION_ID from [{osr}] content:\n{os_rel}' + assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format( + osr, os_rel) ver_id = mat.group(2) # Map the ID to the download ID mapped_id = DISTRO_ID_MAP.get(os_id) @@ -190,22 +184,23 @@ def infer_target_from_os_release(osr: Path) -> str: if mapped_version is None: # If this raises, a version/pattern needs to be added # to DISTRO_VERSION_MAP - raise RuntimeError(f"We don't know how to map {os_id} version '{ver_id}' " - f"to an upstream {mapped_id} version. Please contribute!") + raise RuntimeError("We don't know how to map {} version '{}' " + "to an upstream {} version. Please contribute!" 
+ "".format(os_id, ver_id, mapped_id)) ver_id = mapped_version os_id = mapped_id os_id = os_id.lower() if os_id not in DISTRO_ID_TO_TARGET: - raise RuntimeError(f"We don't know how to map '{os_id}' to a distribution " - "download target. Please contribute!") + raise RuntimeError("We don't know how to map '{}' to a distribution " + "download target. Please contribute!".format(os_id)) # Find the download target based on a filename-style pattern: ver_table = DISTRO_ID_TO_TARGET[os_id] for pattern, target in ver_table.items(): if fnmatch(ver_id, pattern): return target raise RuntimeError( - f"We don't know how to map '{os_id}' version '{ver_id}' to a distribution " - "download target. Please contribute!") + "We don't know how to map '{}' version '{}' to a distribution " + "download target. Please contribute!".format(os_id, ver_id)) def user_caches_root() -> Path: @@ -258,7 +253,7 @@ def version_tup(version: str) -> 'tuple[int, int, int, int, int]': return tuple([int(maj), int(min), 0, 0, 0]) mat = VERSION_RE.match(version) - assert mat, (f'Failed to parse "{version}" as a version number') + assert mat, ('Failed to parse "{}" as a version number'.format(version)) major, minor, patch, tag, tagnum = list(mat.groups()) if tag is None: # No rc tag is greater than an equal base version with any rc tag @@ -574,7 +569,7 @@ def download_file(self, url: str) -> DownloadResult: except urllib.error.HTTPError as e: if e.code != 304: raise RuntimeError( - f'Failed to download [{url}]') from e + 'Failed to download [{u}]'.format(u=url)) from e assert dest.is_file(), ( 'The download cache is missing an expected file', dest) return DownloadResult(False, dest) @@ -638,18 +633,19 @@ def _print_list(db: CacheDB, version: 'str | None', target: 'str | None', component=component) for version, target, arch, edition, comp_key, comp_data in matching: counter += 1 - print(f'Download: {comp_key}\n' - f' Version: {version}\n' - f' Target: {target}\n' - f' Arch: {arch}\n' - f' Edition: {edition}\n' 
- f' Info: {comp_data}\n\n') + print('Download: {}\n' + ' Version: {}\n' + ' Target: {}\n' + ' Arch: {}\n' + ' Edition: {}\n' + ' Info: {}\n\n'.format(comp_key, version, target, arch, + edition, comp_data)) if counter == 1: print('Only one matching item') elif counter == 0: print('No items matched the listed filters') else: - print(f'{counter} available downloadable components') + print('{} available downloadable components'.format(counter)) print('(Omit filter arguments for a list of available filters)') return @@ -682,15 +678,15 @@ def _print_list(db: CacheDB, version: 'str | None', target: 'str | None', initial_indent=' ', subsequent_indent=' ')) print('Architectures:\n' - f' {arches}\n' + ' {}\n' 'Targets:\n' - f'{targets}\n' + '{}\n' 'Editions:\n' - f' {editions}\n' + ' {}\n' 'Versions:\n' - f'{versions}\n' + '{}\n' 'Components:\n' - f' {components}\n') + ' {}\n'.format(arches, targets, editions, versions, components)) def infer_arch(): @@ -726,7 +722,8 @@ def _published_build_url(cache: Cache, version: str, target: str, arch: str, if tup is None: raise ValueError( 'No download was found for ' - f'version="{version}" target="{target}" arch="{arch}" edition="{edition}" component="{component}"') + 'version="{}" target="{}" arch="{}" edition="{}" component="{}"'.format( + version, target, arch, edition, component)) data = json.loads(tup.data_json) return data[value] @@ -755,21 +752,28 @@ def _latest_build_url(target: str, arch: str, edition: str, component: str, 'archive': 'mongodb', 'crypt_shared': 'mongo_crypt_shared_v1', }.get(component, component) - base = f'https://downloads.10gen.com/{platform}' + base = 'https://downloads.10gen.com/{plat}'.format(plat=platform) # Windows has Zip files ext = 'zip' if target == 'windows' else 'tgz' # Enterprise builds have an "enterprise" infix ent_infix = 'enterprise-' if edition == 'enterprise' else '' # Some platforms have a filename infix - tgt_infix = ((target + '-') - if target not in ('windows', 'win32', 'macos') + 
tgt_infix = ((target + '-') # + if target not in ('windows', 'win32', 'macos') # else '') # Non-master branch uses a filename infix br_infix = ((branch + '-') if - (branch is not None and branch != 'master') # else '') - filename = f'{component_name}-{typ}-{arch}-{ent_infix}{tgt_infix}{br_infix}latest.{ext}' - return f'{base}/{filename}' + filename = '{comp}-{typ}-{arch}-{enterprise_}{target_}{br_}latest.{ext}'.format( + comp=component_name, + typ=typ, + arch=arch, + enterprise_=ent_infix, + target_=tgt_infix, + br_=br_infix, + ext=ext) + return '{}/{}'.format(base, filename) def _dl_component(cache: Cache, out_dir: Path, version: str, target: str, @@ -777,7 +781,8 @@ def _dl_component(cache: Cache, out_dir: Path, version: str, target: str, pattern: 'str | None', strip_components: int, test: bool, no_download: bool, latest_build_branch: 'str|None') -> ExpandResult: - print(f'Download {component} {version}-{edition} for {target}-{arch}', file=sys.stderr) + print('Download {} {}-{} for {}-{}'.format(component, version, edition, + target, arch), file=sys.stderr) if version == 'latest-build': dl_url = _latest_build_url(target, arch, edition, component, latest_build_branch) @@ -786,7 +791,7 @@ def _dl_component(cache: Cache, out_dir: Path, version: str, target: str, component) if no_download: print(dl_url) - return None + return cached = cache.download_file(dl_url).path return _expand_archive(cached, out_dir, @@ -840,8 +845,8 @@ def _expand_archive(ar: Path, dest: Path, pattern: 'str | None', Expand the archive members from 'ar' into 'dest'. If 'pattern' is not-None, only extracts members that match the pattern.
''' - print(f'Extract from: [{ar.name}]', file=sys.stderr) - print(f' into: [{dest}]', file=sys.stderr) + print('Extract from: [{}]'.format(ar.name), file=sys.stderr) + print(' into: [{}]'.format(dest), file=sys.stderr) if ar.suffix == '.zip': n_extracted = _expand_zip(ar, dest, @@ -859,22 +864,27 @@ def _expand_archive(ar: Path, dest: Path, pattern: 'str | None', verb = 'would be' if test else 'were' if n_extracted == 0: if pattern and strip_components: - print(f'NOTE: No files {verb} extracted. Likely all files {verb} ' - f'excluded by "--only={pattern}" and/or "--strip-components={strip_components}"', file=sys.stderr) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by "--only={p}" and/or "--strip-components={s}"'. + format(p=pattern, s=strip_components, verb=verb), file=sys.stderr) elif pattern: - print(f'NOTE: No files {verb} extracted. Likely all files {verb} ' - f'excluded by the "--only={pattern}" filter', file=sys.stderr) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by the "--only={p}" filter'.format(p=pattern, + verb=verb), file=sys.stderr) elif strip_components: - print(f'NOTE: No files {verb} extracted. Likely all files {verb} ' - f'excluded by "--strip-components={strip_components}"', file=sys.stderr) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by "--strip-components={s}"'.format( + s=strip_components, verb=verb), file=sys.stderr) else: - print(f'NOTE: No files {verb} extracted. Empty archive?', file=sys.stderr) + print('NOTE: No files {verb} extracted. 
Empty archive?'.format( + verb=verb), file=sys.stderr) return ExpandResult.Empty - if n_extracted == 1: + elif n_extracted == 1: print('One file {v} extracted'.format(v='would be' if test else 'was'), file=sys.stderr) return ExpandResult.Okay - print(f'{n_extracted} files {verb} extracted', file=sys.stderr) - return ExpandResult.Okay + else: + print('{n} files {verb} extracted'.format(n=n_extracted, verb=verb), file=sys.stderr) + return ExpandResult.Okay def _expand_tgz(ar: Path, dest: Path, pattern: 'str | None', @@ -889,7 +899,7 @@ def _expand_tgz(ar: Path, dest: Path, pattern: 'str | None', pattern, strip_components, mem.isdir(), - lambda: cast('IO[bytes]', tf.extractfile(mem)), # noqa: B023 + lambda: cast('IO[bytes]', tf.extractfile(mem)), mem.mode, test=test, ) @@ -908,7 +918,7 @@ def _expand_zip(ar: Path, dest: Path, pattern: 'str | None', pattern, strip_components, item.filename.endswith('/'), ## Equivalent to: item.is_dir(), - lambda: zf.open(item, 'r'), # noqa: B023 + lambda: zf.open(item, 'r'), 0o655, test=test, ) @@ -936,7 +946,7 @@ def _maybe_extract_member(out: Path, relpath: PurePath, pattern: 'str | None', return 0 stripped = _pathjoin(relpath.parts[strip:]) dest = Path(out) / stripped - print(f'\n -> [{dest}]', file=sys.stderr) + print('\n -> [{}]'.format(dest), file=sys.stderr) if test: # We are running in test-only mode: Do not do anything return 1 @@ -1048,7 +1058,7 @@ def main(argv: 'Sequence[str]'): if args.list: _print_list(cache.db, args.version, args.target, args.arch, args.edition, args.component) - return None + return if args.version is None: raise argparse.ArgumentError(None, 'A "--version" is required') diff --git a/.evergreen/mongosh-dl.py b/.evergreen/mongosh-dl.py index 6961e26a..69cb8e86 100644 --- a/.evergreen/mongosh-dl.py +++ b/.evergreen/mongosh-dl.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ Download and extract MongoSH. 
@@ -5,19 +6,19 @@ """ import argparse import json -import os -import re -import shlex -import subprocess import sys import tempfile -import urllib.request -from collections.abc import Sequence +import subprocess +import shlex +import os from pathlib import Path +from typing import Sequence +import urllib.request +import re HERE = Path(__file__).absolute().parent sys.path.insert(0, str(HERE)) -from mongodl import ExpandResult, _expand_archive, infer_arch +from mongodl import _expand_archive, infer_arch, ExpandResult def _get_latest_version(): @@ -56,7 +57,7 @@ def _download(out_dir: Path, version: str, target: str, arch: str, pattern: 'str | None', strip_components: int, test: bool, no_download: bool,) -> int: - print(f'Download {version} mongosh for {target}-{arch}', file=sys.stderr) + print('Download {} mongosh for {}-{}'.format(version, target, arch), file=sys.stderr) if version == "latest": version = _get_latest_version() if arch == "x86_64": diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py index dcc12cb3..0ffbe7d1 100644 --- a/.evergreen/ocsp/mock_ocsp_responder.py +++ b/.evergreen/ocsp/mock_ocsp_responder.py @@ -39,25 +39,32 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import unicode_literals, division, absolute_import, print_function +import logging import base64 -import enum import inspect -import logging import re +import enum +import sys import textwrap -from datetime import datetime, timedelta, timezone +from datetime import datetime, timezone, timedelta +from typing import Callable, Tuple, Optional -from asn1crypto import core, keys, ocsp, x509 +from asn1crypto import x509, keys, core, ocsp from asn1crypto.ocsp import OCSPRequest, OCSPResponse -from flask import Flask, Response, request from oscrypto import asymmetric +from flask import Flask, request, Response __version__ = '0.10.2' __version_info__ = (0, 10, 2) logger = logging.getLogger(__name__) +if sys.version_info < (3,): + byte_cls = str +else: + byte_cls = bytes def _pretty_message(string, *params): """ @@ -83,7 +90,9 @@ def _pretty_message(string, *params): if params: output = output % params - return output.strip() + output = output.strip() + + return output def _type_name(value): @@ -111,7 +120,7 @@ def _writer(func): return property(fget=lambda self: getattr(self, '_%s' % name), fset=func) -class OCSPResponseBuilder: +class OCSPResponseBuilder(object): _response_status = None _certificate = None @@ -126,7 +135,7 @@ class OCSPResponseBuilder: _response_data_extensions = None _single_response_extensions = None - def __init__(self, response_status, certificate_status_list=None, revocation_date=None): + def __init__(self, response_status, certificate_status_list=[], revocation_date=None): """ Unless changed, responses will use SHA-256 for the signature, and will be valid from the moment created for one week. @@ -160,7 +169,7 @@ def __init__(self, response_status, certificate_status_list=None, revocation_dat not "good" or "unknown". 
""" self._response_status = response_status - self._certificate_status_list = certificate_status_list or [] + self._certificate_status_list = certificate_status_list self._revocation_date = revocation_date self._key_hash_algo = 'sha1' @@ -174,7 +183,7 @@ def nonce(self, value): The nonce that was provided during the request. """ - if not isinstance(value, bytes): + if not isinstance(value, byte_cls): raise TypeError(_pretty_message( ''' nonce must be a byte string, not %s @@ -485,10 +494,10 @@ def validate(self): time = datetime(2018, 1, 1, 1, 00, 00, 00, timezone.utc) if self._fault == FAULT_REVOKED: return (CertificateStatus.revoked, time) - if self._fault == FAULT_UNKNOWN: + elif self._fault == FAULT_UNKNOWN: return (CertificateStatus.unknown, None) - if self._fault is not None: - raise NotImplementedError('Fault type could not be found') + elif self._fault != None: + raise NotImplemented('Fault type could not be found') return (CertificateStatus.good, time) def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: @@ -500,7 +509,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: request_list = tbs_request['request_list'] if len(request_list) < 1: logger.warning('Received OCSP request with no requests') - raise NotImplementedError('Empty requests not supported') + raise NotImplemented('Empty requests not supported') single_request = request_list[0] # TODO: Support more than one request req_cert = single_request['req_cert'] @@ -516,7 +525,11 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: certificate_status_list = [(serial, certificate_status.value)] # Build the response - builder = OCSPResponseBuilder(response_status=ResponseStatus.successful.value, certificate_status_list=certificate_status_list, revocation_date=revocation_date) + builder = OCSPResponseBuilder(**{ + 'response_status': ResponseStatus.successful.value, + 'certificate_status_list': certificate_status_list, + 
'revocation_date': revocation_date, + }) # Parse extensions for extension in tbs_request['request_extensions']: @@ -544,7 +557,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: return self._fail(ResponseStatus.internal_error) # If it's an unknown non-critical extension, we can safely ignore it. - if unknown is True: + elif unknown is True: logger.info('Ignored unknown non-critical extension: %r', dict(extension.native)) # Set certificate issuer diff --git a/.evergreen/ocsp/ocsp_mock.py b/.evergreen/ocsp/ocsp_mock.py index 532cdfb5..2ca45dfa 100755 --- a/.evergreen/ocsp/ocsp_mock.py +++ b/.evergreen/ocsp/ocsp_mock.py @@ -5,14 +5,13 @@ import argparse import logging -import os import sys +import os sys.path.append(os.path.join(os.getcwd() ,'src', 'third_party', 'mock_ocsp_responder')) import mock_ocsp_responder - def main(): """Main entry point""" parser = argparse.ArgumentParser(description="MongoDB Mock OCSP Responder.") diff --git a/.evergreen/secrets_handling/setup_secrets.py b/.evergreen/secrets_handling/setup_secrets.py index 7f5cff3e..40515e93 100644 --- a/.evergreen/secrets_handling/setup_secrets.py +++ b/.evergreen/secrets_handling/setup_secrets.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ Script for fetching AWS Secrets Vault secrets for use in testing. 
""" @@ -27,7 +28,7 @@ def get_secrets(vaults, region, profile): resp = client.assume_role(RoleArn=AWS_ROLE_ARN, RoleSessionName=str(uuid.uuid4())) except Exception as e: print(e) - raise ValueError("Please provide a profile (typically using AWS_PROFILE)") from e + raise ValueError("Please provide a profile (typically using AWS_PROFILE)") creds = resp['Credentials'] diff --git a/.evergreen/socks5srv.py b/.evergreen/socks5srv.py index 1e0448b0..8adaff6d 100755 --- a/.evergreen/socks5srv.py +++ b/.evergreen/socks5srv.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 -import argparse -import re -import select -import socket import socketserver +import socket +import select +import re +import argparse # Usage: python3 socks5srv.py --port port [--auth username:password] [--map 'host:port to host:port' ...] @@ -34,7 +34,7 @@ def parse_single_mapping(string): match = re.match(full_re, string) if match is None: - raise Exception(f"Mapping {string} does not match format '{{host}}:{{port}} to {{host}}:{{port}}'") + raise Exception("Mapping {} does not match format '{{host}}:{{port}} to {{host}}:{{port}}'".format(string)) src = ((match.group('src_ipv6') or match.group('src_host')).encode('utf8'), int(match.group('src_port'))) dst = ((match.group('dst_ipv6') or match.group('dst_host')).encode('utf8'), int(match.group('dst_port'))) @@ -109,7 +109,7 @@ def read_exact(self, n): while bytes_read < n: try: chunk_length = self.request.recv_into(mv[bytes_read:]) - except OSError: + except OSError as exc: return None if chunk_length == 0: return None @@ -125,11 +125,11 @@ def create_outgoing_tcp_connection(self, dst, port): af, socktype, proto, canonname, sa = res try: outgoing = socket.socket(af, socktype, proto) - except OSError: + except OSError as msg: continue try: outgoing.connect(sa) - except OSError: + except OSError as msg: outgoing.close() continue break @@ -228,7 +228,7 @@ def raw_proxy(self, a, b): while True: try: (readable, _, _) = select.select([a, b], [], []) - except (OSError, 
ValueError): + except (select.error, ValueError): return if not readable: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 872a3cc4..30da2c0a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,9 +1,5 @@ name: "Tests" -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - on: pull_request: branches: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9cc3562b..eb00664e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -49,13 +49,6 @@ repos: ) args: ["-L", "fle,aks"] -- repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.0" - hooks: - - id: ruff - args: ["--fix", "--show-fixes"] - # - id: ruff-format - - repo: local hooks: - id: executable-shell diff --git a/evergreen_config_generator/evergreen_config_generator/__init__.py b/evergreen_config_generator/evergreen_config_generator/__init__.py index 51d9745d..7d32e3ec 100644 --- a/evergreen_config_generator/evergreen_config_generator/__init__.py +++ b/evergreen_config_generator/evergreen_config_generator/__init__.py @@ -25,7 +25,7 @@ raise -class ConfigObject: +class ConfigObject(object): def __init__(self, *args, **kwargs): super(ConfigObject, self).__init__() @@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs): type(self).represent_config_object) def represent_scalar(self, tag, value, style=None): - if isinstance(value, str) and '\n' in value: + if isinstance(value, (str, unicode)) and '\n' in value: style = '|' return super(_Dumper, self).represent_scalar(tag, value, style) diff --git a/evergreen_config_generator/evergreen_config_generator/tasks.py b/evergreen_config_generator/evergreen_config_generator/tasks.py index e43afc65..04e1da47 100644 --- a/evergreen_config_generator/evergreen_config_generator/tasks.py +++ b/evergreen_config_generator/evergreen_config_generator/tasks.py @@ -18,7 +18,7 @@ try: # Python 3 abstract base classes. 
- from collections import abc + import collections.abc as abc except ImportError: import collections as abc diff --git a/ruff.toml b/ruff.toml deleted file mode 100644 index fdee662c..00000000 --- a/ruff.toml +++ /dev/null @@ -1,43 +0,0 @@ -target-version = "py39" - -exclude = [".evergreen/csfle/bottle.py"] - -[lint] -extend-select = [ - "B", # flake8-bugbear - "EXE", # flake8-executable - "F", # pyflakes - "FURB", # refurb - "I", # isort - "ICN", # flake8-import-conventions - "PGH", # pygrep-hooks - "PIE", # flake8-pie - "PL", # pylint - "PT", # flake8-pytest-style - "RET", # flake8-return - "RUF", # Ruff-specific - "UP", # pyupgrade - "YTT", # flake8-2020 -] -ignore = [ - "ISC001", # Conflicts with formatter - "PLR09", # Too many <...> - "PLR2004", # Magic value used in comparison - "UP008", # Use `super()` instead of `super(__class__, self)` - "ARG002", # Unused method argument: `kwargs` - "PTH123", # `open()` should be replaced by `Path.open()` - "B007", # Loop control variable `canonname` not used within loop body" - "UP031", # Use format specifiers instead of percent format - "PGH003", # Use specific rule codes when ignoring type issues" - "PLR1704", # Redefining argument with the local name" - "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` - "UP014", # Convert `DownloadableComponent` from `NamedTuple` functional to class syntax" - "RET503", # Missing explicit `return` at the end of function able to return non-`None` value - "E402", # Module level import not at top of file -] -unfixable = ["F401"] - -[lint.per-file-ignores] -".evergreen/ocsp/mock_ocsp_responder.py" = ["PLW"] -".evergreen/csfle/kms_*.py" = ["PLW"] -".evergreen/csfle/gcpkms/mock_server.py" = ["PLW"]