From cfdb33afac3173199e9d23b7fcd6715598cbd344 Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Wed, 22 Jan 2025 15:17:48 -0800 Subject: [PATCH 01/10] chore: fix the start and end date of challenge id 535 (CHALLENGE-597) (#2974) --- .../challenge-service/src/main/resources/db/challenges.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv b/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv index b32cec8a0..9c8a9ea56 100644 --- a/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv +++ b/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv @@ -533,4 +533,4 @@ "532","rsna-2024-lumbar-spine-degenerative-classification","RSNA 2024 Lumbar Spine Degenerative Classification","Classify lumbar spine degenerative conditions","Low back pain is the leading cause of disability worldwide, according to the World Health Organization, affecting 619 million people in 2020. Most people experience low back pain at some point in their lives, with the frequency increasing with age. Pain and restricted mobility are often symptoms of spondylosis, a set of degenerative spine conditions including degeneration of intervertebral discs and subsequent narrowing of the spinal canal (spinal stenosis), subarticular recesses, or neural foramen with associated compression or irritations of the nerves in the low back. Magnetic resonance imaging (MRI) provides a detailed view of the lumbar spine vertebra, discs and nerves, enabling radiologists to assess the presence and severity of these conditions. Proper diagnosis and grading of these conditions help guide treatment and potential surgery to help alleviate back pain and improve overall health and quality of life for patients. RSNA has teamed with the American Society of Neur...","","https://www.kaggle.com/competitions/rsna-2024-lumbar-spine-degenerative-classification","completed","8","","2024-05-16","2024-10-08","2648","2024-12-09 17:12:16","2024-12-09 17:12:24" "533","leap-atmospheric-physics-ai-climsim","LEAP - Atmospheric Physics using AI (ClimSim)","Simulate higher resolution atmospheric processes within E3SM-MMF.","Climate models are essential to understanding Earth''s climate system. Because of the complexity of Earth''s climate, these models rely on parameterizations to approximate the effects of physical processes that occur at scales smaller than the size of their grid cells. These approximations are imperfect, however, and their imperfections are a leading source of uncertainty in expected warming, changing precipitation patterns, and the frequency and severity of extreme events. The Multi-scale Modeling Framework (MMF) approach, by contrast, more explicitly represents these subgrid processes, but at a cost too high to be used for operational climate prediction. Your task is to develop ML models that emulate subgrid atmospheric processes–such as storms, clouds, turbulence, rainfall, and radiation–within E3SM-MMF, a multi-scale climate model backed by the U.S. Department of Energy. 
Because ML emulators are significantly cheaper to inference than MMF, progress on this front can help scie...","","https://www.kaggle.com/competitions/leap-atmospheric-physics-ai-climsim","completed","8","","2024-04-18","2024-07-15","2648","2024-12-09 18:18:19","2024-12-09 18:18:25" "534","owkin-and-servier-ai-hackathon-for-glioblastoma-research","Owkin & Servier AI Hackathon for Glioblastoma Research","Join the hackathon to advance glioblastoma research through AI","Join the hackathon to advance glioblastoma research through the use of AI and multimodal patient data","","https://www.owkin.com/connect/glioblastoma-ai-hackathon","upcoming","\N","","2025-02-03","2025-02-04","2944","2024-12-18 18:43:47","2024-12-18 18:47:44" -"535","deep-learning-epilepsy-detection-challenge","Deep Learning Epilepsy Detection Challenge","","Develop proof-of-concept for a seizure detection system that is sensitive, automated, patient-specific, and tunable to maximise sensitivity while minimizing human annotation times using custom data preparation methods, deep learning analytics and electroencephalography (EEG) data.","","","completed","\N","10.1016/j.ebiom.2021.103275","2020","2020","794","2025-01-13 19:18:38","2025-01-13 19:33:44" +"535","deep-learning-epilepsy-detection-challenge","Deep Learning Epilepsy Detection Challenge","","Develop proof-of-concept for a seizure detection system that is sensitive, automated, patient-specific, and tunable to maximise sensitivity while minimizing human annotation times using custom data preparation methods, deep learning analytics and electroencephalography (EEG) data.","","","completed","\N","10.1016/j.ebiom.2021.103275","2020-01-01","2020-01-01","794","2025-01-13 19:18:38","2025-01-13 19:33:44" From 05c99b23adb0e2bb1fdb53f6384475fc18379fa6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 22 Jan 2025 16:12:56 -0800 Subject: [PATCH 02/10] chore(openchallenges): 2025-01-23 DB update (#2976) Co-authored-by: vpchung <9377970+vpchung@users.noreply.github.com> --- .../challenge-service/src/main/resources/db/challenges.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv b/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv index 9c8a9ea56..683e4b7b5 100644 --- a/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv +++ b/apps/openchallenges/challenge-service/src/main/resources/db/challenges.csv @@ -533,4 +533,4 @@ "532","rsna-2024-lumbar-spine-degenerative-classification","RSNA 2024 Lumbar Spine Degenerative Classification","Classify lumbar spine degenerative conditions","Low back pain is the leading cause of disability worldwide, according to the World Health Organization, affecting 619 million people in 2020. Most people experience low back pain at some point in their lives, with the frequency increasing with age. Pain and restricted mobility are often symptoms of spondylosis, a set of degenerative spine conditions including degeneration of intervertebral discs and subsequent narrowing of the spinal canal (spinal stenosis), subarticular recesses, or neural foramen with associated compression or irritations of the nerves in the low back. Magnetic resonance imaging (MRI) provides a detailed view of the lumbar spine vertebra, discs and nerves, enabling radiologists to assess the presence and severity of these conditions. 
Proper diagnosis and grading of these conditions help guide treatment and potential surgery to help alleviate back pain and improve overall health and quality of life for patients. RSNA has teamed with the American Society of Neur...","","https://www.kaggle.com/competitions/rsna-2024-lumbar-spine-degenerative-classification","completed","8","","2024-05-16","2024-10-08","2648","2024-12-09 17:12:16","2024-12-09 17:12:24" "533","leap-atmospheric-physics-ai-climsim","LEAP - Atmospheric Physics using AI (ClimSim)","Simulate higher resolution atmospheric processes within E3SM-MMF.","Climate models are essential to understanding Earth''s climate system. Because of the complexity of Earth''s climate, these models rely on parameterizations to approximate the effects of physical processes that occur at scales smaller than the size of their grid cells. These approximations are imperfect, however, and their imperfections are a leading source of uncertainty in expected warming, changing precipitation patterns, and the frequency and severity of extreme events. The Multi-scale Modeling Framework (MMF) approach, by contrast, more explicitly represents these subgrid processes, but at a cost too high to be used for operational climate prediction. Your task is to develop ML models that emulate subgrid atmospheric processes–such as storms, clouds, turbulence, rainfall, and radiation–within E3SM-MMF, a multi-scale climate model backed by the U.S. Department of Energy. Because ML emulators are significantly cheaper to inference than MMF, progress on this front can help scie...","","https://www.kaggle.com/competitions/leap-atmospheric-physics-ai-climsim","completed","8","","2024-04-18","2024-07-15","2648","2024-12-09 18:18:19","2024-12-09 18:18:25" "534","owkin-and-servier-ai-hackathon-for-glioblastoma-research","Owkin & Servier AI Hackathon for Glioblastoma Research","Join the hackathon to advance glioblastoma research through AI","Join the hackathon to advance glioblastoma research through the use of AI and multimodal patient data","","https://www.owkin.com/connect/glioblastoma-ai-hackathon","upcoming","\N","","2025-02-03","2025-02-04","2944","2024-12-18 18:43:47","2024-12-18 18:47:44" -"535","deep-learning-epilepsy-detection-challenge","Deep Learning Epilepsy Detection Challenge","","Develop proof-of-concept for a seizure detection system that is sensitive, automated, patient-specific, and tunable to maximise sensitivity while minimizing human annotation times using custom data preparation methods, deep learning analytics and electroencephalography (EEG) data.","","","completed","\N","10.1016/j.ebiom.2021.103275","2020-01-01","2020-01-01","794","2025-01-13 19:18:38","2025-01-13 19:33:44" +"535","deep-learning-epilepsy-detection-challenge","Deep Learning Epilepsy Detection Challenge","","Develop proof-of-concept for a seizure detection system that is sensitive, automated, patient-specific, and tunable to maximise sensitivity while minimizing human annotation times using custom data preparation methods, deep learning analytics and electroencephalography (EEG) data.","","","completed","\N","10.1016/j.ebiom.2021.103275","2020-01-01","2020-01-01","794","2025-01-13 19:18:38","2025-01-22 23:02:38" From 5a2469d40b210b9de45f7600c945c42b0e8fe308 Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Thu, 23 Jan 2025 10:35:26 -0800 Subject: [PATCH 03/10] chore: remove the deprecated dev container source and workflow (#2977) --- .../workflows/build-devcontainer-image.yml | 87 -------- .../sage/.devcontainer/Dockerfile | 
187 ------------------ .../sage/.devcontainer/devcontainer.json | 13 -- tools/devcontainers/sage/README.md | 25 --- 4 files changed, 312 deletions(-) delete mode 100644 .github/workflows/build-devcontainer-image.yml delete mode 100644 tools/devcontainers/sage/.devcontainer/Dockerfile delete mode 100644 tools/devcontainers/sage/.devcontainer/devcontainer.json delete mode 100644 tools/devcontainers/sage/README.md diff --git a/.github/workflows/build-devcontainer-image.yml b/.github/workflows/build-devcontainer-image.yml deleted file mode 100644 index 374925428..000000000 --- a/.github/workflows/build-devcontainer-image.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Build Dev Container Image -on: - push: - branches: - - 'main' - paths: - - '.github/workflows/build-devcontainer-image.yml' - - 'tools/devcontainers/sage/**/devcontainer.json' - - 'tools/devcontainers/sage/**/Dockerfile' - pull_request: - paths: - - '.github/workflows/build-devcontainer-image.yml' - - 'tools/devcontainers/sage/**/devcontainer.json' - - 'tools/devcontainers/sage/**/Dockerfile' - -env: - IMAGE_REPOSITORY: ghcr.io/sage-bionetworks/sage-devcontainer - DEFAULT_BRANCH: main - -jobs: - devcontainer: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Lint Dockerfile - uses: docker://hadolint/hadolint:latest - with: - entrypoint: hadolint - args: tools/devcontainers/sage/.devcontainer/Dockerfile - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Cache Docker layers - uses: actions/cache@v3 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-single-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-single-buildx - - - name: Set buildx as the default builder - run: docker buildx install - - - name: Prepare the image build - id: prep - run: | - DEVCONTAINER_VERSION="${GITHUB_SHA::7}" - IMAGE_NAME="${{ env.IMAGE_REPOSITORY }}" - echo "image_name=${IMAGE_NAME}" >> $GITHUB_OUTPUT - echo "devcontainer_version=${DEVCONTAINER_VERSION}" >> $GITHUB_OUTPUT - - - name: Build the image - run: | - WORKSPACE_FOLDER="tools/devcontainers/sage" - IMAGE_NAME=${{ steps.prep.outputs.image_name }} - IMAGE_TAG=${{ steps.prep.outputs.devcontainer_version }} - - # [Optional] Enable buildkit, set output to plain text for logging - export DOCKER_BUILDKIT=1 - export BUILDKIT_PROGRESS=plain - - # Export the devcontainer version, which will be set inside the image. See - # devcontainer.json used to build the image. 
- export DEVCONTAINER_VERSION=${{ steps.prep.outputs.devcontainer_version }} - - # Build the image - npm install -g @devcontainers/cli@0.69.0 - devcontainer build \ - --image-name "${IMAGE_NAME}:${IMAGE_TAG}" \ - --workspace-folder "${WORKSPACE_FOLDER}" - - - name: Login to GitHub Container Registry - if: ${{ github.event_name != 'pull_request' }} - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Push the image to GitHub Container Registry - if: ${{ github.event_name != 'pull_request' }} - run: | - IMAGE_NAME=${{ steps.prep.outputs.image_name }} - IMAGE_TAG=${{ steps.prep.outputs.devcontainer_version }} - docker push "${IMAGE_NAME}:${IMAGE_TAG}" diff --git a/tools/devcontainers/sage/.devcontainer/Dockerfile b/tools/devcontainers/sage/.devcontainer/Dockerfile deleted file mode 100644 index d976c3f78..000000000 --- a/tools/devcontainers/sage/.devcontainer/Dockerfile +++ /dev/null @@ -1,187 +0,0 @@ -# Base image -ARG imageVersion=jammy-20240808 -FROM ubuntu:$imageVersion - -# Set safer bash scripts -SHELL ["/bin/bash", "-euxo", "pipefail", "-c"] - -# List of build arguments -# https://github.com/nektos/act -ARG actVersion="0.2.65" -# https://github.com/sharkdp/hyperfine -ARG hyperfineVersion="1.18.0" -# https://www.npmjs.com/package/@devcontainers/cli -ARG devcontainerCliVersion="0.69.0" -# https://pypi.org/project/poetry -ARG poetryVersion="1.8.3" -# https://docs.posit.co/resources/install-r/#specify-r-version -ARG rVersion="4.2.3" -# https://aquasecurity.github.io/trivy -ARG trivyVersion="0.54.1" -# https://github.com/rstudio/renv -ARG renvVersion="1.0.0" -# https://nodejs.org/en/about/previous-releases -ARG nodeVersionMajor="20" -# https://pypi.org/project/pipenv/ -ARG pipenvVersion="2024.0.1" -# https://github.com/pnpm/pnpm/releases -ARG pnpmVersion="9.9.0" -# List of Python versions separated by spaces -ARG pyenvPythonVersions="3.9.2 3.10.14 3.11.8" -# https://github.com/SonarSource/sonar-scanner-cli/releases -ARG sonarScannerVersion="5.0.1.3006" -# https://github.com/hadolint/hadolint -ARG hadolintVersion="2.12.0" -# The version of this dev container image -ARG devcontainerVersion="" -# The username of the non-root user -ARG user="vscode" - -# Set environment variables -ENV DEVCONTAINER_VERSION=${devcontainerVersion} \ - LANG=en_US.UTF-8 \ - LC_ALL=en_US.UTF-8 - -# Install system packages -RUN apt-get update -qq -y && export DEBIAN_FRONTEND=noninteractive \ - && apt-get install --no-install-recommends -qq -y \ - ca-certificates curl git bash-completion gnupg2 lsb-release ssh sudo \ - python3-pip python3-dev python-is-python3 openjdk-17-jdk \ - htop unzip vim wget lsof iproute2 build-essential \ - kafkacat jq ca-certificates-java gdebi-core \ - # Required by AWS CLI - mandoc \ - # Required for setting up locales - locales \ - # Required by pyenv - make build-essential libssl-dev zlib1g-dev libbz2-dev \ - libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ - xz-utils tk-dev libffi-dev liblzma-dev \ - # Required by Hadolint - shellcheck \ - # Add Node.js repository - && curl -fsSL https://deb.nodesource.com/setup_${nodeVersionMajor}.x -o nodesource_setup.sh \ - && bash nodesource_setup.sh \ - # Add GitHub CLI repository - && curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | \ - gpg --dearmor -o /usr/share/keyrings/githubcli-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) 
signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | \ - tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ - # Add ngrok repository - && curl -fsSL https://ngrok-agent.s3.amazonaws.com/ngrok.asc | tee /etc/apt/trusted.gpg.d/ngrok.asc >/dev/null \ - && echo "deb https://ngrok-agent.s3.amazonaws.com bullseye main" | tee /etc/apt/sources.list.d/ngrok.list \ - # Add hashicorp repository - && curl -fsSL https://apt.releases.hashicorp.com/gpg | apt-key add - \ - && echo "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/hashicorp.list \ - # Install additional packages - && apt-get update -qq -y \ - && apt-get install --no-install-recommends -qq -y nodejs gh ngrok terraform vault \ - # Enable corepack - && corepack enable \ - # Fix Vault CLI - # See https://github.com/hashicorp/vault/issues/10924 - && setcap -r /usr/bin/vault \ - # Set up UTF-8 locale - && echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen \ - # Install R (must be done before clearing the apt cache) - && curl -fsSL "https://cdn.rstudio.com/r/ubuntu-2204/pkgs/r-${rVersion}_1_amd64.deb" -o /tmp/r_amd64.deb \ - && gdebi --non-interactive /tmp/r_amd64.deb \ - && rm -fr /tmp/r_amd64.deb \ - && ln -s /opt/R/${rVersion}/bin/R /usr/local/bin/R \ - && ln -s /opt/R/${rVersion}/bin/Rscript /usr/local/bin/Rscript \ - && R -e "options(repos = c(POSIT = \"https://packagemanager.posit.co/all/__linux__/jammy/latest\", CRAN = \"https://mirror.las.iastate.edu/CRAN\")); install.packages(\"renv\", version = \"${renvVersion}\")" \ - # Cleanup - && apt-get -y autoclean \ - && apt-get -y autoremove \ - && rm -rf /var/lib/apt/lists/* - -# Install Poetry -RUN curl -fsSL https://install.python-poetry.org | POETRY_HOME=/etc/poetry python3 - --version "${poetryVersion}" \ - && ln -s /etc/poetry/bin/poetry /usr/local/bin/. - -# Install Trivy -RUN curl -fsSL "https://github.com/aquasecurity/trivy/releases/download/v${trivyVersion}/trivy_${trivyVersion}_Linux-64bit.deb" -o /tmp/trivy.deb \ - && dpkg -i /tmp/trivy.deb \ - && rm -fr /tmp/trivy.deb - -# Install act -RUN curl -fsSL "https://raw.githubusercontent.com/nektos/act/v${actVersion}/install.sh" | bash - - -# Install AWS CLI -RUN curl -fsSL https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip -o awscliv2.zip \ - && unzip awscliv2.zip \ - && ./aws/install \ - && rm -fr awscliv2.zip ./aws \ - # Add AWS Session Manager plugin - && curl -fsSL "https://s3.amazonaws.com/session-manager-downloads/plugin/latest/ubuntu_64bit/session-manager-plugin.deb" -o /tmp/session-manager-plugin.deb \ - && dpkg -i /tmp/session-manager-plugin.deb \ - && rm -fr /tmp/session-manager-plugin.deb - -# Install AWS SAM CLI -RUN curl -Lo aws-sam-cli-linux-x86_64.zip https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip \ - && unzip aws-sam-cli-linux-x86_64.zip -d sam-installation \ - && ./sam-installation/install \ - && rm -rf aws-sam-cli-linux-x86_64.zip sam-installation \ - && sam --version - -# Install the devcontainer CLI -RUN npm install -g "@devcontainers/cli@${devcontainerCliVersion}" - -# Install Hadolint -RUN curl -fsSL https://github.com/hadolint/hadolint/releases/download/v${hadolintVersion}/hadolint-Linux-x86_64 -o hadolint \ - && mv hadolint /usr/local/bin/. 
\ - && chmod +x /usr/local/bin/hadolint - -# Install hyperfine -RUN curl -fsSL "https://github.com/sharkdp/hyperfine/releases/download/v${hyperfineVersion}/hyperfine_${hyperfineVersion}_amd64.deb" \ - -o /tmp/hyperfine.deb \ - && apt-get install --no-install-recommends -qq -y /tmp/hyperfine.deb \ - && rm -fr /tmp/hyperfine.deb - -# Create non-root user -RUN useradd -m $user \ - && echo "$user:$user" | chpasswd \ - && groupadd docker \ - && usermod -a --groups docker,sudo $user \ - && usermod --shell /bin/bash $user \ - && echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers \ - && printf "%s\n" \ - "" \ - "#export PYENV_ROOT=\"\$HOME/.pyenv\"" \ - "#export PATH=\"\$PYENV_ROOT/bin:\$PATH\"" \ - "eval \"\$(pyenv init --path)\"" \ - "eval \"\$(pyenv virtualenv-init -)\"" \ - "" \ - "# source dev-env.sh if found in the current directory" \ - "if [ -f dev-env.sh ]; then" \ - " . ./dev-env.sh" \ - " workspace-initialize-env" \ - "fi" \ - "" | tee -a "/home/$user/.bashrc" - -# Install SonarScanner CLI -ARG SONAR_SCANNER_HOME=/opt/sonar-scanner -ENV SONAR_SCANNER_HOME=${SONAR_SCANNER_HOME} \ - SONAR_USER_HOME=${SONAR_SCANNER_HOME}/.sonar \ - PATH=${SONAR_SCANNER_HOME}/bin:${PATH} -RUN curl -fsSL https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${sonarScannerVersion}.zip --output sonar-scanner-cli.zip \ - && unzip sonar-scanner-cli.zip \ - && mv sonar-scanner-${sonarScannerVersion} ${SONAR_SCANNER_HOME} \ - && mkdir -p "${SONAR_USER_HOME}" "${SONAR_USER_HOME}/cache" \ - && chown -R ${user}:${user} "${SONAR_SCANNER_HOME}" \ - && chmod -R 777 "${SONAR_USER_HOME}" - -# Switch to non-root user -USER $user - -# Install the global version of pnpm -RUN corepack install --global "pnpm@${pnpmVersion}" - -# Set up Python environments -ENV PYENV_ROOT /home/${user}/.pyenv -ENV PATH $PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH -RUN pip install --no-cache-dir pipenv==${pipenvVersion} \ - && curl -fsSL https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash - \ - && pyenv install ${pyenvPythonVersions} - -CMD ["bash"] \ No newline at end of file diff --git a/tools/devcontainers/sage/.devcontainer/devcontainer.json b/tools/devcontainers/sage/.devcontainer/devcontainer.json deleted file mode 100644 index 0c2245a4c..000000000 --- a/tools/devcontainers/sage/.devcontainer/devcontainer.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "Sage Dev Container (Test)", - "build": { - "dockerfile": "Dockerfile", - "args": { - "imageVersion": "jammy-20240808", - "devcontainerVersion": "${localEnv:DEVCONTAINER_VERSION}" - } - }, - "remoteUser": "vscode", - "shutdownAction": "stopContainer", - "runArgs": ["--name", "sage_devcontainer_test"] -} diff --git a/tools/devcontainers/sage/README.md b/tools/devcontainers/sage/README.md deleted file mode 100644 index efa72f600..000000000 --- a/tools/devcontainers/sage/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Sage Dev Container - -Learn how to build and run dev containers with the devcontainer CLI: - -https://code.visualstudio.com/docs/remote/devcontainer-cli - -# Build the image with dev container - -```console -devcontainer build \ - --image-name ghcr.io/sage-bionetworks/sage-devcontainer:test \ - --workspace-folder ../sage -``` - -# Start the dev container - -```console -devcontainer up --workspace-folder ../sage -``` - -# Step into the dev container - -```console -docker exec -it sage_devcontainer_test bash -``` From c3ca7cc91d4f1c9b5c20393303469c1120289c97 Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Mon, 27 Jan 
2025 09:49:40 -0800 Subject: [PATCH 04/10] chore(schematic): remove Schematic projects and configuration (#2978) --- .devcontainer/devcontainer.json | 20 +- .github/CODEOWNERS | 5 - .github/ISSUE_TEMPLATE/1-bug.yml | 1 - .github/ISSUE_TEMPLATE/2-feature.yml | 1 - .github/ISSUE_TEMPLATE/3-documentation.yml | 1 - .github/ISSUE_TEMPLATE/4-story.yml | 1 - .github/workflows/lint-dockerfiles.yml | 1 - .github/workflows/lint-pr.yml | 1 - .github/workflows/schematic-api-ci.yml | 140 - .github/workflows/sonar-scan-push.yml | 1 - .stylelintignore | 3 - CODE_QUALITY.md | 4 +- README.md | 3 +- apps/iatlas/api/install.sh | 2 +- apps/iatlas/data/install.sh | 2 +- .../data-lambda/.python-version | 2 +- apps/openchallenges/data-lambda/poetry.lock | 24 +- .../openchallenges/data-lambda/pyproject.toml | 2 +- apps/schematic/api-docs/.env.example | 1 - apps/schematic/api-docs/Dockerfile | 6 - apps/schematic/api-docs/README.md | 3 - apps/schematic/api-docs/index.hbs | 20 - apps/schematic/api-docs/project.json | 55 - apps/schematic/api-docs/redocly.yaml | 11 - apps/schematic/api/.dockerignore | 72 - apps/schematic/api/.env.example | 4 - apps/schematic/api/.gitignore | 85 - apps/schematic/api/.openapi-generator-ignore | 41 - apps/schematic/api/.openapi-generator/FILES | 52 - apps/schematic/api/.openapi-generator/VERSION | 1 - apps/schematic/api/AUTHORS.md | 11 - apps/schematic/api/Dockerfile | 82 - apps/schematic/api/README.md | 54 - apps/schematic/api/certificate.conf | 28 - apps/schematic/api/debug_key_cert.py | 76 - apps/schematic/api/default_config.yaml | 4 - apps/schematic/api/dhparam.pem | 13 - apps/schematic/api/docker-compose.yml | 24 - apps/schematic/api/docker-entrypoint.sh | 9 - apps/schematic/api/openapitools.json | 20 - apps/schematic/api/poetry.lock | 4737 ----------------- apps/schematic/api/poetry.toml | 3 - apps/schematic/api/prepare-python.sh | 13 - apps/schematic/api/prepare_key_certificate.py | 60 - apps/schematic/api/project.json | 116 - apps/schematic/api/pyproject.toml | 37 - apps/schematic/api/pytest.ini | 4 - apps/schematic/api/redirect.conf | 11 - apps/schematic/api/save_key_certificate.py | 20 - apps/schematic/api/schematic_api/__init__.py | 0 apps/schematic/api/schematic_api/__main__.py | 24 - .../api/schematic_api/controllers/__init__.py | 0 .../manifest_generation_controller.py | 100 - .../manifest_generation_controller_impl.py | 185 - .../manifest_validation_controller.py | 184 - .../manifest_validation_controller_impl.py | 373 -- .../api/schematic_api/controllers/paging.py | 97 - .../controllers/schema_controller.py | 307 -- .../controllers/schema_controller_impl.py | 568 -- .../controllers/security_controller_.py | 15 - .../controllers/storage_controller.py | 348 -- .../controllers/storage_controller_impl.py | 671 --- .../controllers/tangled_tree_controller.py | 53 - .../tangled_tree_controller_impl.py | 88 - .../api/schematic_api/controllers/utils.py | 319 -- .../controllers/versions_controller.py | 20 - .../controllers/versions_controller_impl.py | 15 - apps/schematic/api/schematic_api/encoder.py | 20 - .../api/schematic_api/models/__init__.py | 45 - .../api/schematic_api/models/asset_type.py | 38 - .../api/schematic_api/models/base_model_.py | 73 - .../api/schematic_api/models/basic_error.py | 153 - .../models/component_requirement_array.py | 62 - .../models/component_requirement_graph.py | 71 - .../models/component_requirement_subgraph.py | 96 - .../models/connected_node_pair.py | 96 - .../models/connected_node_pair_array.py | 65 - .../models/connected_node_pair_page.py | 274 
- .../models/connected_node_pair_page_all_of.py | 69 - .../schematic_api/models/dataset_metadata.py | 94 - .../models/dataset_metadata_array.py | 65 - .../models/dataset_metadata_page.py | 274 - .../models/dataset_metadata_page_all_of.py | 69 - .../api/schematic_api/models/file_metadata.py | 94 - .../models/file_metadata_array.py | 65 - .../models/file_metadata_page.py | 274 - .../models/file_metadata_page_all_of.py | 69 - .../models/google_sheet_links.py | 60 - .../schematic_api/models/manifest_metadata.py | 191 - .../models/manifest_metadata_array.py | 65 - .../models/manifest_metadata_page.py | 274 - .../models/manifest_metadata_page_all_of.py | 69 - .../models/manifest_validation_result.py | 88 - .../api/schematic_api/models/node.py | 66 - .../api/schematic_api/models/node_array.py | 65 - .../api/schematic_api/models/node_page.py | 274 - .../schematic_api/models/node_page_all_of.py | 69 - .../models/node_property_array.py | 62 - .../api/schematic_api/models/page_metadata.py | 238 - .../schematic_api/models/project_metadata.py | 94 - .../models/project_metadata_array.py | 65 - .../models/project_metadata_page.py | 274 - .../models/project_metadata_page_all_of.py | 69 - .../schematic_api/models/validation_rule.py | 66 - .../models/validation_rule_array.py | 65 - apps/schematic/api/schematic_api/mypy.ini | 2 - .../api/schematic_api/openapi/openapi.yaml | 3753 ------------- .../api/schematic_api/test/__init__.py | 15 - .../api/schematic_api/test/conftest.py | 109 - .../test/data/manifests/biospecimen.csv | 2 - .../data/manifests/biospecimen_incorrect.csv | 2 - .../test/data/synapse_config_example.yaml | 5 - ...est_manifest_generation_controller_impl.py | 116 - .../test_manifest_generation_endpoints.py | 100 - ...est_manifest_validation_controller_impl.py | 232 - .../test_manifest_validation_endpoints.py | 283 - .../api/schematic_api/test/test_paging.py | 108 - .../test/test_schema_controller_endpoints.py | 332 -- .../test/test_schema_controller_impl.py | 325 -- .../test/test_storage_controller_endpoints.py | 661 --- .../test_storage_controller_endpoints2.py | 493 -- .../test/test_storage_controller_impl.py | 960 ---- .../test/test_synapse_endpoints.py | 385 -- .../test/test_tangled_tree_endpoints.py | 121 - .../test/test_tangled_tree_impl.py | 50 - .../api/schematic_api/test/test_utils.py | 122 - .../test/test_version_endpoints.py | 41 - .../api/schematic_api/typing_utils.py | 32 - apps/schematic/api/schematic_api/util.py | 150 - apps/schematic/api/self-signed.conf | 6 - apps/schematic/api/ssl-params.conf | 21 - apps/schematic/api/templates/AUTHORS.md | 11 - apps/schematic/api/templates/config.yaml | 2 - .../api/templates/controller.mustache | 116 - apps/schematic/api/tox.ini | 9 - apps/schematic/api/uwsgi-nginx-entrypoint.sh | 53 - apps/schematic/api/uwsgi.ini | 24 - apps/schematic/notebook/.env.example | 2 - apps/schematic/notebook/.gitignore | 1 - apps/schematic/notebook/.python-version | 1 - apps/schematic/notebook/Dockerfile | 1 - apps/schematic/notebook/docker-compose.yml | 19 - apps/schematic/notebook/jupyter_lab_config.py | 1084 ---- .../notebook/notebooks/schematic-api.ipynb | 146 - apps/schematic/notebook/poetry.lock | 1914 ------- apps/schematic/notebook/prepare-python.sh | 8 - apps/schematic/notebook/project.json | 32 - apps/schematic/notebook/pyproject.toml | 19 - dev-env.sh | 4 - docker/schematic/networks.yml | 4 - docker/schematic/serve-detach.sh | 14 - docker/schematic/services/api-docs.yml | 15 - docker/schematic/services/api.yml | 18 - docker/schematic/volumes.yml | 0 
docs/_archive/index.md | 1 - docs/index.md | 11 +- libs/schematic/api-client-python/.gitignore | 66 - .../.openapi-generator-ignore | 29 - .../.openapi-generator/FILES | 27 - .../.openapi-generator/VERSION | 1 - libs/schematic/api-client-python/README.md | 126 - .../api-client-python/docs/BasicError.md | 15 - .../api-client-python/docs/Dataset.md | 12 - .../api-client-python/docs/DatasetsPage.md | 18 - .../docs/DatasetsPageAllOf.md | 10 - .../api-client-python/docs/PageMetadata.md | 17 - .../api-client-python/docs/StorageApi.md | 75 - .../api-client-python/openapitools.json | 17 - libs/schematic/api-client-python/poetry.lock | 258 - .../api-client-python/prepare-python.sh | 8 - libs/schematic/api-client-python/project.json | 31 - .../api-client-python/pyproject.toml | 20 - .../schematic_client/__init__.py | 27 - .../schematic_client/api/__init__.py | 3 - .../schematic_client/api/storage_api.py | 140 - .../schematic_client/api_client.py | 1011 ---- .../schematic_client/apis/__init__.py | 16 - .../schematic_client/configuration.py | 462 -- .../schematic_client/exceptions.py | 157 - .../schematic_client/model/__init__.py | 5 - .../schematic_client/model/basic_error.py | 297 -- .../schematic_client/model/dataset.py | 283 - .../schematic_client/model/datasets_page.py | 360 -- .../model/datasets_page_all_of.py | 291 - .../schematic_client/model/page_metadata.py | 333 -- .../schematic_client/model_utils.py | 2090 -------- .../schematic_client/models/__init__.py | 16 - .../schematic_client/rest.py | 451 -- libs/schematic/api-client-python/setup.py | 42 - .../api-client-python/test-requirements.txt | 1 - .../api-client-python/test/__init__.py | 0 .../test/test_basic_error.py | 34 - .../api-client-python/test/test_dataset.py | 34 - .../test/test_datasets_page.py | 41 - .../test/test_datasets_page_all_of.py | 37 - .../test/test_page_metadata.py | 34 - .../test/test_storage_api.py | 34 - libs/schematic/api-client-python/tox.ini | 9 - libs/schematic/api-description/.gitignore | 1 - libs/schematic/api-description/README.md | 7 - libs/schematic/api-description/build/api.yaml | 990 ---- .../api-description/build/openapi.yaml | 1801 ------- libs/schematic/api-description/project.json | 24 - .../api-description/src/components/README.md | 13 - .../src/components/headers/ExpiresAfter.yaml | 4 - .../components/parameters/path/assetType.yaml | 6 - .../parameters/path/assetViewId.yaml | 6 - .../parameters/path/componentDisplay.yaml | 6 - .../parameters/path/componentLabel.yaml | 6 - .../components/parameters/path/datasetId.yaml | 6 - .../parameters/path/manifestId.yaml | 6 - .../parameters/path/nodeDisplay.yaml | 6 - .../components/parameters/path/nodeLabel.yaml | 6 - .../components/parameters/path/projectId.yaml | 6 - .../parameters/query/addAnnotations.yaml | 7 - .../parameters/query/annotationKeyStyle.yaml | 8 - .../parameters/query/assetViewIdQuery.yaml | 6 - .../parameters/query/component.yaml | 6 - .../parameters/query/componentLabelQuery.yaml | 6 - .../components/parameters/query/dataType.yaml | 6 - .../parameters/query/dataTypeArray.yaml | 6 - .../parameters/query/datasetIdArray.yaml | 6 - .../parameters/query/datasetIdQuery.yaml | 6 - .../parameters/query/displayLabelType.yaml | 8 - .../parameters/query/figureType.yaml | 8 - .../parameters/query/fileNames.yaml | 6 - .../parameters/query/hideBlanks.yaml | 9 - .../parameters/query/includeIndex.yaml | 7 - .../parameters/query/manifestJson.yaml | 6 - .../parameters/query/manifestTitle.yaml | 6 - .../parameters/query/nodeLabelArray.yaml | 6 - 
.../parameters/query/pageMaxItems.yaml | 8 - .../parameters/query/pageNumber.yaml | 8 - .../parameters/query/relationshipType.yaml | 6 - .../parameters/query/restrictRules.yaml | 6 - .../parameters/query/returnDisplayNames.yaml | 6 - .../query/returnOrderedBySchema.yaml | 6 - .../parameters/query/schemaUrl.yaml | 6 - .../parameters/query/storageMethod.yaml | 12 - .../query/tableColumnNameStyle.yaml | 8 - .../query/tableManipulationMethod.yaml | 10 - .../parameters/query/textFormat.yaml | 8 - .../parameters/query/useFullFilePath.yaml | 6 - .../parameters/query/useStrictCamelCase.yaml | 5 - .../parameters/query/useStrictValidation.yaml | 8 - .../src/components/responses/BadRequest.yaml | 5 - .../src/components/responses/Conflict.yaml | 5 - .../responses/InternalServerError.yaml | 5 - .../src/components/responses/NotFound.yaml | 5 - .../components/responses/Unauthorized.yaml | 5 - .../src/components/schemas/AssetType.yaml | 4 - .../src/components/schemas/AssetViewId.yaml | 3 - .../src/components/schemas/AssetViewJson.yaml | 2 - .../src/components/schemas/BasicError.yaml | 21 - .../src/components/schemas/Component.yaml | 11 - .../components/schemas/ComponentDisplay.yaml | 3 - .../components/schemas/ComponentLabel.yaml | 3 - .../schemas/ComponentRequirementArray.yaml | 7 - .../schemas/ComponentRequirementGraph.yaml | 7 - .../schemas/ComponentRequirementSubgraph.yaml | 16 - .../components/schemas/ComponentsPage.yaml | 15 - .../components/schemas/ConnectedNodePair.yaml | 16 - .../schemas/ConnectedNodePairArray.yaml | 8 - .../schemas/ConnectedNodePairPage.yaml | 15 - .../src/components/schemas/DataType.yaml | 3 - .../src/components/schemas/DataTypeArray.yaml | 4 - .../src/components/schemas/DatasetId.yaml | 3 - .../components/schemas/DatasetIdArray.yaml | 4 - .../components/schemas/DatasetMetadata.yaml | 16 - .../schemas/DatasetMetadataArray.yaml | 8 - .../schemas/DatasetMetadataPage.yaml | 15 - .../src/components/schemas/FileMetadata.yaml | 16 - .../components/schemas/FileMetadataArray.yaml | 8 - .../components/schemas/FileMetadataPage.yaml | 15 - .../src/components/schemas/FileNames.yaml | 4 - .../components/schemas/GoogleSheetLinks.yaml | 7 - .../src/components/schemas/ManifestId.yaml | 3 - .../src/components/schemas/ManifestJson.yaml | 2 - .../components/schemas/ManifestMetadata.yaml | 26 - .../schemas/ManifestMetadataArray.yaml | 8 - .../schemas/ManifestMetadataPage.yaml | 15 - .../schemas/ManifestValidationResult.yaml | 13 - .../src/components/schemas/Node.yaml | 11 - .../src/components/schemas/NodeArray.yaml | 8 - .../src/components/schemas/NodeDisplay.yaml | 3 - .../src/components/schemas/NodeLabel.yaml | 3 - .../components/schemas/NodeLabelArray.yaml | 4 - .../src/components/schemas/NodePage.yaml | 15 - .../components/schemas/NodePropertyArray.yaml | 8 - .../src/components/schemas/PageMetadata.yaml | 38 - .../src/components/schemas/ProjectId.yaml | 3 - .../components/schemas/ProjectMetadata.yaml | 16 - .../schemas/ProjectMetadataArray.yaml | 8 - .../schemas/ProjectMetadataPage.yaml | 15 - .../src/components/schemas/PropertyLabel.yaml | 3 - .../components/schemas/RelationshipType.yaml | 3 - .../src/components/schemas/RestrictRules.yaml | 3 - .../schemas/ReturnDisplayNames.yaml | 3 - .../schemas/ReturnOrderedBySchema.yaml | 3 - .../src/components/schemas/SchemaUrl.yaml | 3 - .../components/schemas/TangledTreeLayers.yaml | 2 - .../components/schemas/TangledTreeText.yaml | 2 - .../components/schemas/UseFullFilePath.yaml | 3 - .../schemas/UseStrictCamelCase.yaml | 3 - 
.../components/schemas/ValidationRule.yaml | 11 - .../schemas/ValidationRuleArray.yaml | 8 - .../api-description/src/openapi.yaml | 129 - .../api-description/src/paths/README.md | 107 - .../assetViews/@{assetViewId}/csv.yaml | 28 - .../assetViews/@{assetViewId}/json.yaml | 28 - .../@{assetViewId}/projectMetadataArray.yaml | 28 - .../@{assetViewId}/projectMetadataPage.yaml | 31 - .../@{datasetId}/fileMetadataArray.yaml | 32 - .../@{datasetId}/fileMetadataPage.yaml | 34 - .../datasets/@{datasetId}/manifestCsv.yaml | 30 - .../datasets/@{datasetId}/manifestJson.yaml | 30 - .../manifests/@{manifestId}/csv.yaml | 28 - .../manifests/@{manifestId}/json.yaml | 28 - .../@{projectId}/datasetMetadataArray.yaml | 30 - .../@{projectId}/datasetMetadataPage.yaml | 32 - .../@{projectId}/manifestMetadataArray.yaml | 30 - .../@{projectId}/manifestMetadataPage.yaml | 32 - .../@{componentLabel}/component.yaml | 22 - .../@{componentLabel}/requirementsArray.yaml | 20 - .../@{componentLabel}/requirementsGraph.yaml | 20 - .../src/paths/connectedNodePairArray.yaml | 19 - .../src/paths/connectedNodePairPage.yaml | 21 - .../src/paths/generateExcelManifest.yaml | 41 - .../paths/generateGoogleSheetManifests.yaml | 50 - .../nodes/@{nodeDisplay}/isRequired.yaml | 21 - .../nodes/@{nodeDisplay}/propertyLabel.yaml | 19 - .../nodes/@{nodeDisplay}/validationRules.yaml | 20 - .../nodes/@{nodeLabel}/dependencyArray.yaml | 22 - .../nodes/@{nodeLabel}/dependencyPage.yaml | 24 - .../nodes/@{nodeLabel}/nodeProperties.yaml | 20 - .../src/paths/schemaAttributes.yaml | 19 - .../src/paths/schematicVersion.yaml | 16 - .../src/paths/submitManifestCsv.yaml | 37 - .../src/paths/submitManifestJson.yaml | 35 - .../src/paths/tangledTreeLayers.yaml | 19 - .../src/paths/tangledTreeText.yaml | 20 - .../src/paths/validateManifestCsv.yaml | 30 - .../src/paths/validateManifestJson.yaml | 26 - .../api-description/tmp/output/openapi.yaml | 54 - ...plorer_find_class_specific_properties.yaml | 28 - .../paths/explorer_get_node_dependencies.yaml | 44 - .../output/paths/explorer_get_node_range.yaml | 34 - ..._get_property_label_from_display_name.yaml | 34 - .../output/paths/get_datatype_manifest.yaml | 39 - .../tmp/output/paths/manifest_download.yaml | 57 - .../tmp/output/paths/manifest_generate.yaml | 95 - .../tmp/output/paths/manifest_populate.yaml | 57 - .../paths/model_component-requirements.yaml | 57 - .../tmp/output/paths/model_submit.yaml | 100 - .../tmp/output/paths/model_validate.yaml | 60 - .../paths/schemas_get_graph_by_edge_type.yaml | 41 - .../tmp/output/paths/schemas_get_schema.yaml | 26 - .../paths/schemas_is_node_required.yaml | 28 - .../output/paths/storage_assets_tables.yaml | 41 - .../output/paths/storage_dataset_files.yaml | 59 - .../paths/storage_project_datasets.yaml | 43 - .../paths/storage_project_manifests.yaml | 50 - .../tmp/output/paths/storage_projects.yaml | 33 - .../output/paths/visualize_attributes.yaml | 24 - .../paths/visualize_tangled_tree_layers.yaml | 34 - .../paths/visualize_tangled_tree_text.yaml | 44 - mkdocs.yml | 2 - tools/configure-hostnames.sh | 1 - tools/redocly/config.yaml | 5 - 369 files changed, 26 insertions(+), 38204 deletions(-) delete mode 100644 .github/workflows/schematic-api-ci.yml delete mode 100644 apps/schematic/api-docs/.env.example delete mode 100644 apps/schematic/api-docs/Dockerfile delete mode 100644 apps/schematic/api-docs/README.md delete mode 100644 apps/schematic/api-docs/index.hbs delete mode 100644 apps/schematic/api-docs/project.json delete mode 100644 
apps/schematic/api-docs/redocly.yaml delete mode 100644 apps/schematic/api/.dockerignore delete mode 100644 apps/schematic/api/.env.example delete mode 100644 apps/schematic/api/.gitignore delete mode 100644 apps/schematic/api/.openapi-generator-ignore delete mode 100644 apps/schematic/api/.openapi-generator/FILES delete mode 100644 apps/schematic/api/.openapi-generator/VERSION delete mode 100644 apps/schematic/api/AUTHORS.md delete mode 100644 apps/schematic/api/Dockerfile delete mode 100644 apps/schematic/api/README.md delete mode 100644 apps/schematic/api/certificate.conf delete mode 100644 apps/schematic/api/debug_key_cert.py delete mode 100644 apps/schematic/api/default_config.yaml delete mode 100644 apps/schematic/api/dhparam.pem delete mode 100644 apps/schematic/api/docker-compose.yml delete mode 100644 apps/schematic/api/docker-entrypoint.sh delete mode 100644 apps/schematic/api/openapitools.json delete mode 100644 apps/schematic/api/poetry.lock delete mode 100644 apps/schematic/api/poetry.toml delete mode 100755 apps/schematic/api/prepare-python.sh delete mode 100644 apps/schematic/api/prepare_key_certificate.py delete mode 100644 apps/schematic/api/project.json delete mode 100644 apps/schematic/api/pyproject.toml delete mode 100644 apps/schematic/api/pytest.ini delete mode 100644 apps/schematic/api/redirect.conf delete mode 100644 apps/schematic/api/save_key_certificate.py delete mode 100644 apps/schematic/api/schematic_api/__init__.py delete mode 100644 apps/schematic/api/schematic_api/__main__.py delete mode 100644 apps/schematic/api/schematic_api/controllers/__init__.py delete mode 100644 apps/schematic/api/schematic_api/controllers/manifest_generation_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/manifest_generation_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/controllers/manifest_validation_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/manifest_validation_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/controllers/paging.py delete mode 100644 apps/schematic/api/schematic_api/controllers/schema_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/schema_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/controllers/security_controller_.py delete mode 100644 apps/schematic/api/schematic_api/controllers/storage_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/storage_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/controllers/tangled_tree_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/tangled_tree_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/controllers/utils.py delete mode 100644 apps/schematic/api/schematic_api/controllers/versions_controller.py delete mode 100644 apps/schematic/api/schematic_api/controllers/versions_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/encoder.py delete mode 100644 apps/schematic/api/schematic_api/models/__init__.py delete mode 100644 apps/schematic/api/schematic_api/models/asset_type.py delete mode 100644 apps/schematic/api/schematic_api/models/base_model_.py delete mode 100644 apps/schematic/api/schematic_api/models/basic_error.py delete mode 100644 apps/schematic/api/schematic_api/models/component_requirement_array.py delete mode 100644 apps/schematic/api/schematic_api/models/component_requirement_graph.py delete mode 100644 
apps/schematic/api/schematic_api/models/component_requirement_subgraph.py delete mode 100644 apps/schematic/api/schematic_api/models/connected_node_pair.py delete mode 100644 apps/schematic/api/schematic_api/models/connected_node_pair_array.py delete mode 100644 apps/schematic/api/schematic_api/models/connected_node_pair_page.py delete mode 100644 apps/schematic/api/schematic_api/models/connected_node_pair_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/dataset_metadata.py delete mode 100644 apps/schematic/api/schematic_api/models/dataset_metadata_array.py delete mode 100644 apps/schematic/api/schematic_api/models/dataset_metadata_page.py delete mode 100644 apps/schematic/api/schematic_api/models/dataset_metadata_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/file_metadata.py delete mode 100644 apps/schematic/api/schematic_api/models/file_metadata_array.py delete mode 100644 apps/schematic/api/schematic_api/models/file_metadata_page.py delete mode 100644 apps/schematic/api/schematic_api/models/file_metadata_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/google_sheet_links.py delete mode 100644 apps/schematic/api/schematic_api/models/manifest_metadata.py delete mode 100644 apps/schematic/api/schematic_api/models/manifest_metadata_array.py delete mode 100644 apps/schematic/api/schematic_api/models/manifest_metadata_page.py delete mode 100644 apps/schematic/api/schematic_api/models/manifest_metadata_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/manifest_validation_result.py delete mode 100644 apps/schematic/api/schematic_api/models/node.py delete mode 100644 apps/schematic/api/schematic_api/models/node_array.py delete mode 100644 apps/schematic/api/schematic_api/models/node_page.py delete mode 100644 apps/schematic/api/schematic_api/models/node_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/node_property_array.py delete mode 100644 apps/schematic/api/schematic_api/models/page_metadata.py delete mode 100644 apps/schematic/api/schematic_api/models/project_metadata.py delete mode 100644 apps/schematic/api/schematic_api/models/project_metadata_array.py delete mode 100644 apps/schematic/api/schematic_api/models/project_metadata_page.py delete mode 100644 apps/schematic/api/schematic_api/models/project_metadata_page_all_of.py delete mode 100644 apps/schematic/api/schematic_api/models/validation_rule.py delete mode 100644 apps/schematic/api/schematic_api/models/validation_rule_array.py delete mode 100644 apps/schematic/api/schematic_api/mypy.ini delete mode 100644 apps/schematic/api/schematic_api/openapi/openapi.yaml delete mode 100644 apps/schematic/api/schematic_api/test/__init__.py delete mode 100644 apps/schematic/api/schematic_api/test/conftest.py delete mode 100644 apps/schematic/api/schematic_api/test/data/manifests/biospecimen.csv delete mode 100644 apps/schematic/api/schematic_api/test/data/manifests/biospecimen_incorrect.csv delete mode 100644 apps/schematic/api/schematic_api/test/data/synapse_config_example.yaml delete mode 100644 apps/schematic/api/schematic_api/test/test_manifest_generation_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/test/test_manifest_generation_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/test/test_manifest_validation_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/test/test_manifest_validation_endpoints.py delete mode 100644 
apps/schematic/api/schematic_api/test/test_paging.py delete mode 100644 apps/schematic/api/schematic_api/test/test_schema_controller_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/test/test_schema_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/test/test_storage_controller_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/test/test_storage_controller_endpoints2.py delete mode 100644 apps/schematic/api/schematic_api/test/test_storage_controller_impl.py delete mode 100644 apps/schematic/api/schematic_api/test/test_synapse_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/test/test_tangled_tree_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/test/test_tangled_tree_impl.py delete mode 100644 apps/schematic/api/schematic_api/test/test_utils.py delete mode 100644 apps/schematic/api/schematic_api/test/test_version_endpoints.py delete mode 100644 apps/schematic/api/schematic_api/typing_utils.py delete mode 100644 apps/schematic/api/schematic_api/util.py delete mode 100644 apps/schematic/api/self-signed.conf delete mode 100644 apps/schematic/api/ssl-params.conf delete mode 100644 apps/schematic/api/templates/AUTHORS.md delete mode 100644 apps/schematic/api/templates/config.yaml delete mode 100644 apps/schematic/api/templates/controller.mustache delete mode 100644 apps/schematic/api/tox.ini delete mode 100644 apps/schematic/api/uwsgi-nginx-entrypoint.sh delete mode 100644 apps/schematic/api/uwsgi.ini delete mode 100644 apps/schematic/notebook/.env.example delete mode 100644 apps/schematic/notebook/.gitignore delete mode 100644 apps/schematic/notebook/.python-version delete mode 100644 apps/schematic/notebook/Dockerfile delete mode 100644 apps/schematic/notebook/docker-compose.yml delete mode 100644 apps/schematic/notebook/jupyter_lab_config.py delete mode 100644 apps/schematic/notebook/notebooks/schematic-api.ipynb delete mode 100644 apps/schematic/notebook/poetry.lock delete mode 100755 apps/schematic/notebook/prepare-python.sh delete mode 100644 apps/schematic/notebook/project.json delete mode 100644 apps/schematic/notebook/pyproject.toml delete mode 100644 docker/schematic/networks.yml delete mode 100755 docker/schematic/serve-detach.sh delete mode 100644 docker/schematic/services/api-docs.yml delete mode 100644 docker/schematic/services/api.yml delete mode 100644 docker/schematic/volumes.yml delete mode 100644 libs/schematic/api-client-python/.gitignore delete mode 100644 libs/schematic/api-client-python/.openapi-generator-ignore delete mode 100644 libs/schematic/api-client-python/.openapi-generator/FILES delete mode 100644 libs/schematic/api-client-python/.openapi-generator/VERSION delete mode 100644 libs/schematic/api-client-python/README.md delete mode 100644 libs/schematic/api-client-python/docs/BasicError.md delete mode 100644 libs/schematic/api-client-python/docs/Dataset.md delete mode 100644 libs/schematic/api-client-python/docs/DatasetsPage.md delete mode 100644 libs/schematic/api-client-python/docs/DatasetsPageAllOf.md delete mode 100644 libs/schematic/api-client-python/docs/PageMetadata.md delete mode 100644 libs/schematic/api-client-python/docs/StorageApi.md delete mode 100644 libs/schematic/api-client-python/openapitools.json delete mode 100644 libs/schematic/api-client-python/poetry.lock delete mode 100755 libs/schematic/api-client-python/prepare-python.sh delete mode 100644 libs/schematic/api-client-python/project.json delete mode 100644 libs/schematic/api-client-python/pyproject.toml delete 
mode 100644 libs/schematic/api-client-python/schematic_client/__init__.py delete mode 100644 libs/schematic/api-client-python/schematic_client/api/__init__.py delete mode 100644 libs/schematic/api-client-python/schematic_client/api/storage_api.py delete mode 100644 libs/schematic/api-client-python/schematic_client/api_client.py delete mode 100644 libs/schematic/api-client-python/schematic_client/apis/__init__.py delete mode 100644 libs/schematic/api-client-python/schematic_client/configuration.py delete mode 100644 libs/schematic/api-client-python/schematic_client/exceptions.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/__init__.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/basic_error.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/dataset.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/datasets_page.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/datasets_page_all_of.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model/page_metadata.py delete mode 100644 libs/schematic/api-client-python/schematic_client/model_utils.py delete mode 100644 libs/schematic/api-client-python/schematic_client/models/__init__.py delete mode 100644 libs/schematic/api-client-python/schematic_client/rest.py delete mode 100644 libs/schematic/api-client-python/setup.py delete mode 100644 libs/schematic/api-client-python/test-requirements.txt delete mode 100644 libs/schematic/api-client-python/test/__init__.py delete mode 100644 libs/schematic/api-client-python/test/test_basic_error.py delete mode 100644 libs/schematic/api-client-python/test/test_dataset.py delete mode 100644 libs/schematic/api-client-python/test/test_datasets_page.py delete mode 100644 libs/schematic/api-client-python/test/test_datasets_page_all_of.py delete mode 100644 libs/schematic/api-client-python/test/test_page_metadata.py delete mode 100644 libs/schematic/api-client-python/test/test_storage_api.py delete mode 100644 libs/schematic/api-client-python/tox.ini delete mode 100644 libs/schematic/api-description/.gitignore delete mode 100644 libs/schematic/api-description/README.md delete mode 100644 libs/schematic/api-description/build/api.yaml delete mode 100644 libs/schematic/api-description/build/openapi.yaml delete mode 100644 libs/schematic/api-description/project.json delete mode 100644 libs/schematic/api-description/src/components/README.md delete mode 100644 libs/schematic/api-description/src/components/headers/ExpiresAfter.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/assetType.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/assetViewId.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/componentDisplay.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/componentLabel.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/datasetId.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/manifestId.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/nodeDisplay.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/nodeLabel.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/path/projectId.yaml delete mode 100644 
libs/schematic/api-description/src/components/parameters/query/addAnnotations.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/annotationKeyStyle.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/assetViewIdQuery.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/component.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/componentLabelQuery.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/dataType.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/dataTypeArray.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/datasetIdArray.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/datasetIdQuery.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/displayLabelType.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/figureType.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/fileNames.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/hideBlanks.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/includeIndex.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/manifestJson.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/manifestTitle.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/nodeLabelArray.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/pageMaxItems.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/pageNumber.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/relationshipType.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/restrictRules.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/returnDisplayNames.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/returnOrderedBySchema.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/schemaUrl.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/storageMethod.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/tableColumnNameStyle.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/tableManipulationMethod.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/textFormat.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/useFullFilePath.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/useStrictCamelCase.yaml delete mode 100644 libs/schematic/api-description/src/components/parameters/query/useStrictValidation.yaml delete mode 100644 libs/schematic/api-description/src/components/responses/BadRequest.yaml delete mode 100644 libs/schematic/api-description/src/components/responses/Conflict.yaml delete mode 100644 libs/schematic/api-description/src/components/responses/InternalServerError.yaml delete mode 100644 libs/schematic/api-description/src/components/responses/NotFound.yaml delete mode 100644 
libs/schematic/api-description/src/components/responses/Unauthorized.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/AssetType.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/AssetViewId.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/AssetViewJson.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/BasicError.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/Component.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentDisplay.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentLabel.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentRequirementArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentRequirementGraph.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentRequirementSubgraph.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ComponentsPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ConnectedNodePair.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ConnectedNodePairArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ConnectedNodePairPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DataType.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DataTypeArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DatasetId.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DatasetIdArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DatasetMetadata.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DatasetMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/DatasetMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/FileMetadata.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/FileMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/FileMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/FileNames.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/GoogleSheetLinks.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestId.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestJson.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestMetadata.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ManifestValidationResult.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/Node.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/NodeArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/NodeDisplay.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/NodeLabel.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/NodeLabelArray.yaml 
delete mode 100644 libs/schematic/api-description/src/components/schemas/NodePage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/NodePropertyArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/PageMetadata.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ProjectId.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ProjectMetadata.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ProjectMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ProjectMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/PropertyLabel.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/RelationshipType.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/RestrictRules.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ReturnDisplayNames.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ReturnOrderedBySchema.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/SchemaUrl.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/TangledTreeLayers.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/TangledTreeText.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/UseFullFilePath.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/UseStrictCamelCase.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ValidationRule.yaml delete mode 100644 libs/schematic/api-description/src/components/schemas/ValidationRuleArray.yaml delete mode 100644 libs/schematic/api-description/src/openapi.yaml delete mode 100644 libs/schematic/api-description/src/paths/README.md delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/csv.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/json.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestCsv.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestJson.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/csv.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/json.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataPage.yaml delete mode 100644 
libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataPage.yaml delete mode 100644 libs/schematic/api-description/src/paths/components/@{componentLabel}/component.yaml delete mode 100644 libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsGraph.yaml delete mode 100644 libs/schematic/api-description/src/paths/connectedNodePairArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/connectedNodePairPage.yaml delete mode 100644 libs/schematic/api-description/src/paths/generateExcelManifest.yaml delete mode 100644 libs/schematic/api-description/src/paths/generateGoogleSheetManifests.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/isRequired.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/propertyLabel.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/validationRules.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyArray.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyPage.yaml delete mode 100644 libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/nodeProperties.yaml delete mode 100644 libs/schematic/api-description/src/paths/schemaAttributes.yaml delete mode 100644 libs/schematic/api-description/src/paths/schematicVersion.yaml delete mode 100644 libs/schematic/api-description/src/paths/submitManifestCsv.yaml delete mode 100644 libs/schematic/api-description/src/paths/submitManifestJson.yaml delete mode 100644 libs/schematic/api-description/src/paths/tangledTreeLayers.yaml delete mode 100644 libs/schematic/api-description/src/paths/tangledTreeText.yaml delete mode 100644 libs/schematic/api-description/src/paths/validateManifestCsv.yaml delete mode 100644 libs/schematic/api-description/src/paths/validateManifestJson.yaml delete mode 100644 libs/schematic/api-description/tmp/output/openapi.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/explorer_find_class_specific_properties.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/explorer_get_node_dependencies.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/explorer_get_node_range.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/explorer_get_property_label_from_display_name.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/get_datatype_manifest.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/manifest_download.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/manifest_generate.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/manifest_populate.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/model_component-requirements.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/model_submit.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/model_validate.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/schemas_get_graph_by_edge_type.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/schemas_get_schema.yaml delete mode 100644 
libs/schematic/api-description/tmp/output/paths/schemas_is_node_required.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/storage_assets_tables.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/storage_dataset_files.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/storage_project_datasets.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/storage_project_manifests.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/storage_projects.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/visualize_attributes.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_layers.yaml delete mode 100644 libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_text.yaml diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index bf3a626bc..0d548718e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -67,8 +67,8 @@ } }, "forwardPorts": [ - 2432, 3306, 3333, 4200, 4211, 5000, 5200, 5432, 5601, 7010, 7443, 7200, 7888, 8010, 8071, 8000, - 8080, 8081, 8082, 8084, 8085, 8086, 8090, 8200, 8888, 8889, 9200, 9411, 27017 + 2432, 3306, 3333, 4200, 4211, 5000, 5200, 5432, 5601, 8010, 8071, 8000, 8080, 8081, 8082, 8084, + 8085, 8086, 8090, 8200, 8888, 8889, 9200, 9411, 27017 ], "portsAttributes": { "2432": { @@ -107,22 +107,6 @@ "label": "openchallenges-opensearch-dashboards", "onAutoForward": "silent" }, - "7010": { - "label": "schematic-api-docs", - "onAutoForward": "silent" - }, - "7443": { - "label": "schematic-api", - "onAutoForward": "silent" - }, - "7200": { - "label": "schematic-app", - "onAutoForward": "silent" - }, - "7888": { - "label": "schematic-notebook", - "onAutoForward": "silent" - }, "8000": { "label": "openchallenges-apex", "onAutoForward": "silent" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 94a673686..7a2cc956a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -21,11 +21,6 @@ /apps/sage/ @tschaffter /libs/sage/ @tschaffter -# Schematic -/apps/schematic/ @andrewelamb @GiaJordan @linglp @milen-sage -/libs/schematic/ @andrewelamb @GiaJordan @linglp @milen-sage -/libs/schematic/api-client-r/ @andrewelamb @lakikowolfe @afwillia - /apps/agora/ @Sage-Bionetworks/sage-monorepo-agora /libs/agora/ @Sage-Bionetworks/sage-monorepo-agora diff --git a/.github/ISSUE_TEMPLATE/1-bug.yml b/.github/ISSUE_TEMPLATE/1-bug.yml index 71eb520aa..c39527486 100644 --- a/.github/ISSUE_TEMPLATE/1-bug.yml +++ b/.github/ISSUE_TEMPLATE/1-bug.yml @@ -27,7 +27,6 @@ body: - Sage - Sage Monorepo - Sandbox - - Schematic - Synapse - Other - type: textarea diff --git a/.github/ISSUE_TEMPLATE/2-feature.yml b/.github/ISSUE_TEMPLATE/2-feature.yml index 62d41d1a9..7b1cae040 100644 --- a/.github/ISSUE_TEMPLATE/2-feature.yml +++ b/.github/ISSUE_TEMPLATE/2-feature.yml @@ -16,7 +16,6 @@ body: - Sage - Sage Monorepo - Sandbox - - Schematic - Synapse - Other - type: textarea diff --git a/.github/ISSUE_TEMPLATE/3-documentation.yml b/.github/ISSUE_TEMPLATE/3-documentation.yml index 05279fdaa..63bcf6a42 100644 --- a/.github/ISSUE_TEMPLATE/3-documentation.yml +++ b/.github/ISSUE_TEMPLATE/3-documentation.yml @@ -16,7 +16,6 @@ body: - Sage - Sage Monorepo - Sandbox - - Schematic - Synapse - Other - type: checkboxes diff --git a/.github/ISSUE_TEMPLATE/4-story.yml b/.github/ISSUE_TEMPLATE/4-story.yml index 10f316062..24822d70e 100644 --- a/.github/ISSUE_TEMPLATE/4-story.yml +++ 
b/.github/ISSUE_TEMPLATE/4-story.yml @@ -16,7 +16,6 @@ body: - Sage - Sage Monorepo - Sandbox - - Schematic - Synapse - Other - type: textarea diff --git a/.github/workflows/lint-dockerfiles.yml b/.github/workflows/lint-dockerfiles.yml index 19667f3a3..5d180765d 100644 --- a/.github/workflows/lint-dockerfiles.yml +++ b/.github/workflows/lint-dockerfiles.yml @@ -9,7 +9,6 @@ on: - 'openchallenges/**' - 'sage-monorepo/**' - 'sage/**' - - 'schematic/**' pull_request: jobs: diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml index 24684d4a9..d87bd91da 100644 --- a/.github/workflows/lint-pr.yml +++ b/.github/workflows/lint-pr.yml @@ -34,7 +34,6 @@ jobs: sage sage-monorepo sandbox - schematic synapse # Configure that a scope must always be provided. requireScope: false diff --git a/.github/workflows/schematic-api-ci.yml b/.github/workflows/schematic-api-ci.yml deleted file mode 100644 index df6b79ac5..000000000 --- a/.github/workflows/schematic-api-ci.yml +++ /dev/null @@ -1,140 +0,0 @@ -name: Schematic API CI -on: - pull_request: - branches: - - main - -env: - NX_BRANCH: ${{ github.event.number }} - NX_RUN_GROUP: ${{ github.run_id }} - NX_CLOUD_AUTH_TOKEN: ${{ secrets.NX_CLOUD_AUTH_TOKEN }} - NX_CLOUD_ENCRYPTION_KEY: ${{ secrets.NX_CLOUD_ENCRYPTION_KEY }} - NX_CLOUD_ENV_NAME: 'linux' - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - DOCKER_USERNAME: ${{ github.actor }} - DOCKER_PASSWORD: ${{ secrets.GITHUB_TOKEN }} - -jobs: - pr: - runs-on: ubuntu-22.04-4core-16GBRAM-150GBSSD - # Runs this job if: - # 1. Triggered by a PR - # 2. The PR originate from the Schematic-API-Staging branch - # 3. Targets the main branch - if: | - github.event_name == 'pull_request' && - github.event.pull_request.base.ref == 'main' && - github.event.pull_request.head.ref == 'Schematic-API-Staging' - - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.ref }} - repository: ${{ github.event.pull_request.head.repo.full_name }} - fetch-depth: 0 - - - name: Derive appropriate SHAs for base and head for `nx affected` commands - uses: nrwl/nx-set-shas@v3 - - - name: Set up pnpm cache - uses: actions/cache@v3 - with: - path: '/tmp/.pnpm-store' - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Set up Poetry cache - uses: actions/cache@v3 - with: - path: '/tmp/.cache/pypoetry' - key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} - - - name: Set up venv cache - uses: actions/cache@v3 - with: - path: | - /tmp/.local/share/virtualenv - **/.venv - key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }} - - - name: Set up Gradle cache - uses: actions/cache@v3 - with: - path: | - /tmp/.gradle/caches - /tmp/.gradle/wrapper - key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} - restore-keys: | - ${{ runner.os }}-gradle- - - - name: Install the Dev Container CLI - run: npm install -g @devcontainers/cli@0.69.0 - - - name: Start the dev container - run: | - mkdir -p \ - /tmp/.pnpm-store \ - /tmp/.cache/R/renv/cache \ - /tmp/.cache/pypoetry \ - /tmp/.local/share/virtualenv \ - /tmp/.gradle/caches \ - /tmp/.gradle/wrapper - - devcontainer up \ - --mount type=bind,source=/tmp/.pnpm-store,target=/workspaces/sage-monorepo/.pnpm-store \ - --mount type=bind,source=/tmp/.cache/R/renv/cache,target=/home/vscode/.cache/R/renv/cache \ - --mount type=bind,source=/tmp/.cache/pypoetry,target=/home/vscode/.cache/pypoetry \ - --mount 
type=bind,source=/tmp/.local/share/virtualenv,target=/home/vscode/.local/share/virtualenv \ - --mount type=bind,source=/tmp/.gradle/caches,target=/home/vscode/.gradle/caches \ - --mount type=bind,source=/tmp/.gradle/wrapper,target=/home/vscode/.gradle/wrapper \ - --workspace-folder ../sage-monorepo - - - name: Prepare the workspace - run: | - devcontainer exec --workspace-folder ../sage-monorepo bash -c " - sudo chown -R vscode:vscode \ - /workspaces/sage-monorepo \ - /home/vscode/.cache \ - /home/vscode/.local \ - /home/vscode/.gradle \ - && . ./dev-env.sh \ - && workspace-install" - - - name: Set up synapse config - run: | - import yaml - secrets = { - "synapse_token": "${{ secrets.SCHEMATIC_SYNAPSE_TOKEN }}", - "test_project": "${{ secrets.SCHEMATIC_TEST_PROJECT }}", - "test_dataset": "${{ secrets.SCHEMATIC_TEST_DATASET }}", - "test_manifest": "${{ secrets.SCHEMATIC_TEST_MANIFEST }}", - "test_asset_view": "${{ secrets.SCHEMATIC_TEST_ASSET_VIEW }}" - } - for key, secret in secrets.items(): - assert secret is not None - assert isinstance(secret, str) - assert len(secret) > 0 - with open('apps/schematic/api/schematic_api/test/data/synapse_config.yaml', 'w') as file: - yaml.dump(secrets, file) - shell: python - - - name: Set up google credentials - env: - SERVICE_ACCOUNT_CREDS: ${{ secrets.SCHEMATIC_SERVICE_ACCT_CREDS }} - run: | - import json - import os - credentials_dict = json.loads(os.environ["SERVICE_ACCOUNT_CREDS"]) - credentials_file_name = "apps/schematic/api/schematic_service_account_creds.json" - with open(credentials_file_name, 'w', encoding='utf-8') as f: - json.dump(credentials_dict, f, ensure_ascii=False, indent=4) - shell: python - - - name: Test the affected projects (all) - run: | - devcontainer exec --workspace-folder ../sage-monorepo bash -c ". 
./dev-env.sh \ - && nx affected --target=test-all" - - - name: Remove the dev container - run: docker rm -f sage_devcontainer diff --git a/.github/workflows/sonar-scan-push.yml b/.github/workflows/sonar-scan-push.yml index b58eb30ca..441454b89 100644 --- a/.github/workflows/sonar-scan-push.yml +++ b/.github/workflows/sonar-scan-push.yml @@ -9,7 +9,6 @@ on: - 'openchallenges/**' - 'sage-monorepo/**' - 'sage/**' - - 'schematic/**' jobs: sonar: diff --git a/.stylelintignore b/.stylelintignore index d14036119..047daf643 100644 --- a/.stylelintignore +++ b/.stylelintignore @@ -8,6 +8,3 @@ dist/ # Ignore all test reports coverage/ reports/ - -# Ignore selected project folders -apps/schematic/api/great_expectations \ No newline at end of file diff --git a/CODE_QUALITY.md b/CODE_QUALITY.md index bd7e5b9c0..f9554c3bc 100644 --- a/CODE_QUALITY.md +++ b/CODE_QUALITY.md @@ -25,5 +25,5 @@ | [edam-etl](https://sonarcloud.io/summary/overall?id=openchallenges-edam-etl) | ![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-edam-etl&metric=alert_status) | ![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-edam-etl&metric=reliability_rating) | ![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-edam-etl&metric=sqale_rating) | ![Technical Debt](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-edam-etl&metric=sqale_index) | ![Coverage](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-edam-etl&metric=coverage) | | [image-service](https://sonarcloud.io/summary/overall?id=openchallenges-image-service) | ![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-image-service&metric=alert_status) | ![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-image-service&metric=reliability_rating) | ![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-image-service&metric=sqale_rating) | ![Technical Debt](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-image-service&metric=sqale_index) | ![Coverage](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-image-service&metric=coverage) | | [organization-service](https://sonarcloud.io/summary/overall?id=openchallenges-organization-service) | ![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-organization-service&metric=alert_status) | ![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-organization-service&metric=reliability_rating) | ![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-organization-service&metric=sqale_rating) | ![Technical Debt](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-organization-service&metric=sqale_index) | ![Coverage](https://sonarcloud.io/api/project_badges/measure?project=openchallenges-organization-service&metric=coverage) | -| **Schematic** | | -| [api](https://sonarcloud.io/summary/overall?id=schematic-api) | ![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=schematic-api&metric=alert_status) | ![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=schematic-api&metric=reliability_rating) | ![Maintainability 
Rating](https://sonarcloud.io/api/project_badges/measure?project=schematic-api&metric=sqale_rating) | ![Technical Debt](https://sonarcloud.io/api/project_badges/measure?project=schematic-api&metric=sqale_index) | ![Coverage](https://sonarcloud.io/api/project_badges/measure?project=schematic-api&metric=coverage) | + +| diff --git a/README.md b/README.md index 147bc6c08..b7c8a567d 100644 --- a/README.md +++ b/README.md @@ -115,11 +115,10 @@ Join the conversation and help the community. ## Products -- Agora (evaluation) +- Agora - iAtlas - MODEL-AD - OpenChallenge -- Schematic - Synapse (evaluation) diff --git a/apps/iatlas/api/install.sh b/apps/iatlas/api/install.sh index 7466303d6..a8233ccf2 100755 --- a/apps/iatlas/api/install.sh +++ b/apps/iatlas/api/install.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -PYTHON_VERSION="3.8.20" +PYTHON_VERSION=$(cat ".python-version") pyenv install --skip-existing $PYTHON_VERSION diff --git a/apps/iatlas/data/install.sh b/apps/iatlas/data/install.sh index 5275fc3df..a8233ccf2 100755 --- a/apps/iatlas/data/install.sh +++ b/apps/iatlas/data/install.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -PYTHON_VERSION="3.11.10" +PYTHON_VERSION=$(cat ".python-version") pyenv install --skip-existing $PYTHON_VERSION diff --git a/apps/openchallenges/data-lambda/.python-version b/apps/openchallenges/data-lambda/.python-version index 4eba2a62e..3e72aa698 100644 --- a/apps/openchallenges/data-lambda/.python-version +++ b/apps/openchallenges/data-lambda/.python-version @@ -1 +1 @@ -3.13.0 +3.11.10 diff --git a/apps/openchallenges/data-lambda/poetry.lock b/apps/openchallenges/data-lambda/poetry.lock index f8212c984..9f006d898 100644 --- a/apps/openchallenges/data-lambda/poetry.lock +++ b/apps/openchallenges/data-lambda/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, ] [[package]] @@ -125,13 +125,13 @@ files = [ [[package]] name = "google-auth" -version = "2.37.0" +version = "2.38.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0"}, - {file = "google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00"}, + {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, + {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, ] [package.dependencies] @@ -323,7 +323,7 @@ files = [ ] [package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} +numpy = {version = ">=1.23.2", markers = "python_version == \"3.11\""} python-dateutil = ">=2.8.2" pytz = ">=2020.1" tzdata = ">=2022.7" @@ -469,13 +469,13 @@ files = [ [[package]] name = "tzdata" 
-version = "2024.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -497,5 +497,5 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" -python-versions = "3.13.0" -content-hash = "d3dbc9262b2bfe21782dec908509bb5e40044f93f1b8f2e9885384a00c4cce26" +python-versions = "3.11.10" +content-hash = "b638e246191450653a84a17d7991a0db94fc1c7294b288fe4563fdea4408e04c" diff --git a/apps/openchallenges/data-lambda/pyproject.toml b/apps/openchallenges/data-lambda/pyproject.toml index e27ce77da..0cb723823 100644 --- a/apps/openchallenges/data-lambda/pyproject.toml +++ b/apps/openchallenges/data-lambda/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Verena Chung "] readme = "README.md" [tool.poetry.dependencies] -python = "3.13.0" +python = "3.11.10" requests = "2.32.3" gspread = "6.1.4" pandas = "2.2.3" diff --git a/apps/schematic/api-docs/.env.example b/apps/schematic/api-docs/.env.example deleted file mode 100644 index e91c7926d..000000000 --- a/apps/schematic/api-docs/.env.example +++ /dev/null @@ -1 +0,0 @@ -PORT=7010 \ No newline at end of file diff --git a/apps/schematic/api-docs/Dockerfile b/apps/schematic/api-docs/Dockerfile deleted file mode 100644 index f0f3a1aa8..000000000 --- a/apps/schematic/api-docs/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM redocly/redoc:v2.1.5 - -COPY build/redoc-static.html /usr/share/nginx/html/index.html -# COPY favicon.ico /usr/share/nginx/html/ - -EXPOSE 7010 \ No newline at end of file diff --git a/apps/schematic/api-docs/README.md b/apps/schematic/api-docs/README.md deleted file mode 100644 index a19e01a96..000000000 --- a/apps/schematic/api-docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Schematic API Docs - -TODO diff --git a/apps/schematic/api-docs/index.hbs b/apps/schematic/api-docs/index.hbs deleted file mode 100644 index 2adf572d4..000000000 --- a/apps/schematic/api-docs/index.hbs +++ /dev/null @@ -1,20 +0,0 @@ - - - - {{title}} - - {{!-- --}} - - - {{{redocHead}}} - {{#unless disableGoogleFont}}{{/unless}} - - - {{{redocHTML}}} - - \ No newline at end of file diff --git a/apps/schematic/api-docs/project.json b/apps/schematic/api-docs/project.json deleted file mode 100644 index 140040c71..000000000 --- a/apps/schematic/api-docs/project.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "schematic-api-docs", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "apps/schematic/api-docs/src", - "projectType": "application", - "targets": { - "create-config": { - "executor": "nx:run-commands", - "options": { - "command": "cp -n .env.example .env", - "cwd": "{projectRoot}" - } - }, - "build": { - "executor": "nx:run-commands", - "options": { - "command": "redocly build-docs --config redocly.yaml --template index.hbs --output build/redoc-static.html", - "cwd": "apps/schematic/api-docs" - } - }, - "serve": { - "executor": "nx:run-commands", - "options": { - "command": "redocly preview-docs --config redocly.yaml --port 7010", - "cwd": 
"apps/schematic/api-docs" - } - }, - "serve-detach": { - "executor": "nx:run-commands", - "options": { - "command": "docker/schematic/serve-detach.sh schematic-api-docs" - } - }, - "build-image": { - "executor": "@nx-tools/nx-container:build", - "options": { - "context": "apps/schematic/api-docs", - "metadata": { - "images": ["ghcr.io/sage-bionetworks/schematic-api-docs"], - "tags": ["type=edge,branch=main", "type=raw,value=local", "type=sha"] - }, - "push": false - } - }, - "scan-image": { - "executor": "nx:run-commands", - "options": { - "command": "trivy image ghcr.io/sage-bionetworks/schematic-api-docs:local --quiet", - "color": true - } - } - }, - "tags": ["type:docs", "scope:backend"], - "implicitDependencies": ["schematic-api-description"] -} diff --git a/apps/schematic/api-docs/redocly.yaml b/apps/schematic/api-docs/redocly.yaml deleted file mode 100644 index e6a2f0b50..000000000 --- a/apps/schematic/api-docs/redocly.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# See https://docs.redoc.ly/cli/configuration/ for more information. -apis: - main: - root: '../../../libs/schematic/api-description/build/openapi.yaml' -theme: - openapi: - theme: - rightPanel: - backgroundColor: '#314fa7' -# sidebar: -# backgroundColor: '#ffffff' diff --git a/apps/schematic/api/.dockerignore b/apps/schematic/api/.dockerignore deleted file mode 100644 index f96196019..000000000 --- a/apps/schematic/api/.dockerignore +++ /dev/null @@ -1,72 +0,0 @@ -.travis.yaml -.openapi-generator-ignore -README.md -tox.ini -git_push.sh -test-requirements.txt -setup.py - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ -venv/ -.python-version - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -#Ipython Notebook -.ipynb_checkpoints diff --git a/apps/schematic/api/.env.example b/apps/schematic/api/.env.example deleted file mode 100644 index a2af929e7..000000000 --- a/apps/schematic/api/.env.example +++ /dev/null @@ -1,4 +0,0 @@ -# API server -SERVER_PROTOCOL=http:// -SERVER_DOMAIN=localhost -SERVER_PORT=7080 diff --git a/apps/schematic/api/.gitignore b/apps/schematic/api/.gitignore deleted file mode 100644 index eb3cb8900..000000000 --- a/apps/schematic/api/.gitignore +++ /dev/null @@ -1,85 +0,0 @@ -.scannerwork - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ -venv/ -.venv/ -.python-version -.pytest_cache - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -#Ipython Notebook -.ipynb_checkpoints - -#secrets -*secrets* -schematic_service_account_creds.json -private_localhost_certificate.crt -private_localhost.key - -#config files -config.yaml -schematic_api/test/data/synapse_config.yaml - -#schematic downloaded files -manifests -great_expectations - -#integration tests -schematic_api/test/test_integration.py \ No newline at end of file diff --git a/apps/schematic/api/.openapi-generator-ignore b/apps/schematic/api/.openapi-generator-ignore deleted file mode 100644 index 2ae630743..000000000 --- a/apps/schematic/api/.openapi-generator-ignore +++ /dev/null @@ -1,41 +0,0 @@ -# OpenAPI Generator Ignore -# Generated by openapi-generator https://github.com/openapitools/openapi-generator - -# Use this file to prevent files from being overwritten by the generator. -# The patterns follow closely to .gitignore or .dockerignore. - -# As an example, the C# client generator defines ApiClient.cs. -# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: -#ApiClient.cs - -# You can match any string of characters against a directory, file or extension with a single asterisk (*): -#foo/*/qux -# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux - -# You can recursively match patterns against a directory, file or extension with a double asterisk (**): -#foo/**/qux -# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux - -# You can also negate patterns with an exclamation (!). 
-# For example, you can ignore all files in a docs folder with the file extension .md: -#docs/*.md -# Then explicitly reverse the ignore rule for a single file: -#!docs/README.md -.dockerignore -.gitignore -.openapi-generator-ignore -.travis.yml -Dockerfile -git_push.sh -README.md -requirements.txt -schematic_api/__main__.py -setup.py -test-requirements.txt -schematic_api/test/test_manifest_generation_controller.py -schematic_api/test/test_storage_controller.py -schematic_api/test/test_schema_controller.py -schematic_api/test/test_manifest_validation_controller.py -schematic_api/test/test_versions_controller.py -schematic_api/test/test_tangled_tree_controller.py -tox.ini diff --git a/apps/schematic/api/.openapi-generator/FILES b/apps/schematic/api/.openapi-generator/FILES deleted file mode 100644 index 67091db1d..000000000 --- a/apps/schematic/api/.openapi-generator/FILES +++ /dev/null @@ -1,52 +0,0 @@ -AUTHORS.md -schematic_api/__init__.py -schematic_api/controllers/__init__.py -schematic_api/controllers/manifest_generation_controller.py -schematic_api/controllers/manifest_validation_controller.py -schematic_api/controllers/schema_controller.py -schematic_api/controllers/security_controller_.py -schematic_api/controllers/storage_controller.py -schematic_api/controllers/tangled_tree_controller.py -schematic_api/controllers/versions_controller.py -schematic_api/encoder.py -schematic_api/models/__init__.py -schematic_api/models/asset_type.py -schematic_api/models/base_model_.py -schematic_api/models/basic_error.py -schematic_api/models/component_requirement_array.py -schematic_api/models/component_requirement_graph.py -schematic_api/models/component_requirement_subgraph.py -schematic_api/models/connected_node_pair.py -schematic_api/models/connected_node_pair_array.py -schematic_api/models/connected_node_pair_page.py -schematic_api/models/connected_node_pair_page_all_of.py -schematic_api/models/dataset_metadata.py -schematic_api/models/dataset_metadata_array.py -schematic_api/models/dataset_metadata_page.py -schematic_api/models/dataset_metadata_page_all_of.py -schematic_api/models/file_metadata.py -schematic_api/models/file_metadata_array.py -schematic_api/models/file_metadata_page.py -schematic_api/models/file_metadata_page_all_of.py -schematic_api/models/google_sheet_links.py -schematic_api/models/manifest_metadata.py -schematic_api/models/manifest_metadata_array.py -schematic_api/models/manifest_metadata_page.py -schematic_api/models/manifest_metadata_page_all_of.py -schematic_api/models/manifest_validation_result.py -schematic_api/models/node.py -schematic_api/models/node_array.py -schematic_api/models/node_page.py -schematic_api/models/node_page_all_of.py -schematic_api/models/node_property_array.py -schematic_api/models/page_metadata.py -schematic_api/models/project_metadata.py -schematic_api/models/project_metadata_array.py -schematic_api/models/project_metadata_page.py -schematic_api/models/project_metadata_page_all_of.py -schematic_api/models/validation_rule.py -schematic_api/models/validation_rule_array.py -schematic_api/openapi/openapi.yaml -schematic_api/test/__init__.py -schematic_api/typing_utils.py -schematic_api/util.py diff --git a/apps/schematic/api/.openapi-generator/VERSION b/apps/schematic/api/.openapi-generator/VERSION deleted file mode 100644 index 0df17dd0f..000000000 --- a/apps/schematic/api/.openapi-generator/VERSION +++ /dev/null @@ -1 +0,0 @@ -6.2.1 \ No newline at end of file diff --git a/apps/schematic/api/AUTHORS.md b/apps/schematic/api/AUTHORS.md deleted file 
mode 100644 index 01c10d9f0..000000000 --- a/apps/schematic/api/AUTHORS.md +++ /dev/null @@ -1,11 +0,0 @@ -# Authors - -Ordered by first contribution. - -- [Thomas Schaffter](https://github.com/tschaffter) - - - - - diff --git a/apps/schematic/api/Dockerfile b/apps/schematic/api/Dockerfile deleted file mode 100644 index 7da94a2e8..000000000 --- a/apps/schematic/api/Dockerfile +++ /dev/null @@ -1,82 +0,0 @@ -FROM tiangolo/uwsgi-nginx-flask:python3.10-2024-09-16 - -# add label -LABEL org.opencontainers.image.authors='Milen Nikolov , Andrew Lamb , Mialy DeFelice , Gianna Jordan , Lingling Peng ' - -# the environment variables defined here are the default -# and can be overwritten by docker run -e VARIABLE = XX -# or can be overwritten by .env when using docker compose -ENV PYTHONFAULTHANDLER=1 \ - PYTHONUNBUFFERED=1 \ - PYTHONHASHSEED=random \ - PIP_NO_CACHE_DIR=off \ - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=200 \ - POETRY_VERSION=1.3.0 \ - APP_PARENT_DIR=/app \ - NGINX_CONFIG=/etc/nginx/conf.d \ - APP_DIR=/app/app \ - ROOT=/ \ - UWSGI_INI=/app/uwsgi.ini \ - NGINX_WORKER_PROCESSES=1 \ - VERSION=$TAG - -# run open ssl and generate certificate -RUN apt-get update -qq -y && export DEBIAN_FRONTEND=noninteractive \ - && apt-get install --no-install-recommends -qq -y \ - openssl jq \ - && apt-get -y autoclean \ - && apt-get -y autoremove \ - && rm -rf /var/lib/apt/lists/* - -# add dhparam.pem -# dhparam.pem was used in ssl-params.conf -COPY dhparam.pem /etc/ssl/dhparam.pem - -# copy all nginx config files -WORKDIR ${NGINX_CONFIG} -COPY ./self-signed.conf ./ssl-params.conf ./certificate.conf ./ - -# copy to use custom uwsgi.ini -COPY uwsgi.ini /app/uwsgi.ini - -# copy files relevant for schematic apis -COPY pyproject.toml poetry.lock /app/app/ -COPY ./schematic_api /app/app/schematic_api - -# install dependencies -WORKDIR /app/app -# Use the version of Poetry installed in the dev container. -# See /workspaces/sage-monorepo/tools/devcontainers/sage/.devcontainer/Dockerfile -RUN pip install --no-cache-dir poetry==1.6.1 \ - && poetry config --local virtualenvs.create false \ - && poetry run pip install "cython<3.0.0" \ - && poetry run pip install --no-build-isolation pyyaml==5.4.1 \ - && poetry install --with prod --no-root --no-interaction --no-ansi \ - # Update file permission - && mkdir /root/.synapseCache /app/app/manifests \ - # temporary here to ensure .synapseCache is not empty - && echo "This is a test file." 
> /root/.synapseCache/test.txt \ - # temporary here until we move .synapseCache to a different path - && chmod -R 777 /root /app - -# Modify entrypoint script to allow SSL private key and certificate to be saved -WORKDIR ${ROOT} -COPY uwsgi-nginx-entrypoint.sh ./entrypoint2.sh -COPY uwsgi-nginx-entrypoint.sh ./uwsgi-nginx-entrypoint2.sh -COPY save_key_certificate.py ./save_key_certificate.py - -RUN chmod +x uwsgi-nginx-entrypoint2.sh \ - && chmod +x entrypoint2.sh \ - && chown -R nginx /uwsgi-nginx-entrypoint2.sh \ - && chown -R nginx /entrypoint2.sh - -WORKDIR ${APP_DIR} - -# specify entrypoint again to generate config -# have to respecify CMD too -ENTRYPOINT ["/entrypoint2.sh"] -CMD ["/start.sh"] - -# Expose ports -EXPOSE 7443 \ No newline at end of file diff --git a/apps/schematic/api/README.md b/apps/schematic/api/README.md deleted file mode 100644 index faab8f7e2..000000000 --- a/apps/schematic/api/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Run Schematic APIs - -## Running without Docker - -To run the server, please execute the following from folder `apps/schematic/api`: - -``` -poetry shell -``` - -To install dependencies: - -``` -poetry install -``` - -And run schematic APIs: - -``` -python3 -m schematic_api -``` - -and open your browser to here: - -``` -http://localhost:7443/api/v1/ui/ -``` - -## Running with Docker - -To run the server on a Docker container, please execute the following from the root directory: - -```bash -# Prepare the development environment of the project with nx prepare schematic-api. This will create a venv and install all the Python dependencies. -nx prepare schematic-api - -# You only need to run this command one time -# This step adds SSL private key and certificate as environment variable in .env file -python3 apps/schematic/api/prepare_key_certificate.py - -# Running the following command from root (/workspaces/sage-monorepo) to build the image -nx build-image schematic-api - -# In the same location as above, start the containerized REST API with: -nx serve-detach schematic-api -``` - -You could open your browser here: - -``` -https://localhost:7443/api/v1/ui/ -``` - -Note: When the OpenAPI description has changed, regenerate the REST API with nx run schematic-api:generate. Also, `dhparam.pem` was generated by using command: `RUN openssl dhparam -out dhparam.pem 4096`. If there's an issue with the `dhparam.pem` in the future, please re run the command. 
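For illustration only (not part of this patch): the deleted README above describes serving the containerized Schematic API at https://localhost:7443, and the nginx configuration removed just below exposes a `/health` route that returns `alive`. A minimal smoke test for that setup might look like the sketch below; the base URL, port, and the use of `verify=False` for the self-signed certificate are assumptions drawn from the surrounding deleted files, not a documented interface.

```python
# Minimal sketch (assumption-based, not part of the patch): smoke-test the
# containerized Schematic API described in the deleted README above.
# Assumes `nx serve-detach schematic-api` exposed https://localhost:7443 with
# the self-signed certificate, so TLS verification is disabled here.
import requests

BASE_URL = "https://localhost:7443"  # port taken from the deleted README/Dockerfile


def api_is_alive(base_url: str = BASE_URL) -> bool:
    # The deleted nginx config (certificate.conf) serves /health -> 'alive'.
    try:
        response = requests.get(f"{base_url}/health", verify=False, timeout=10)
    except requests.RequestException:
        return False
    return response.status_code == 200 and response.text.strip() == "alive"


if __name__ == "__main__":
    print("Schematic API reachable:", api_is_alive())
```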
diff --git a/apps/schematic/api/certificate.conf b/apps/schematic/api/certificate.conf deleted file mode 100644 index 805572fbf..000000000 --- a/apps/schematic/api/certificate.conf +++ /dev/null @@ -1,28 +0,0 @@ -server { - # listen to port 80 for http requests - listen 80 http2 default_server; - # listen to port 7443 for https requests - listen 7443 ssl http2 default_server; - listen [::]:7443 ssl http2 default_server; - include /etc/nginx/conf.d/self-signed.conf; - include /etc/nginx/conf.d/ssl-params.conf; - server_name 127.0.0.1; - proxy_read_timeout 300; - proxy_connect_timeout 300; - proxy_send_timeout 300; - error_page 497 https://$http_host$request_uri; - location / { - try_files $uri @app; - } - location @app { - include uwsgi_params; - uwsgi_pass unix:///tmp/uwsgi.sock; - } - location /static { - alias /app/static; - } - location /health { - return 200 'alive'; - add_header Content-Type text/plain; - } -} \ No newline at end of file diff --git a/apps/schematic/api/debug_key_cert.py b/apps/schematic/api/debug_key_cert.py deleted file mode 100644 index 1425b422b..000000000 --- a/apps/schematic/api/debug_key_cert.py +++ /dev/null @@ -1,76 +0,0 @@ -from environs import Env -import subprocess -import json -import base64 -import os - -# Create an instance of Env -env = Env() - -# Load the .env file -env.read_env(path=".env") - -# Define key and certificate file path -test_private_key_file_path = "test_private_key.key" -test_certificate_key_file_path = "test_certificate.crt" - -# Access the variables -secret_manager_secrets = os.environ["SECRETS_MANAGER_SECRETS"] - -ssl_private_key = json.loads(secret_manager_secrets)["SSL_PRIVATE_KEY"] -ssl_certificate_key = json.loads(secret_manager_secrets)["SSL_CERTIFICATE"] - -# delete preivous results if necessary -if os.path.exists(test_private_key_file_path): - os.remove(test_private_key_file_path) -if os.path.exists(test_certificate_key_file_path): - os.remove(test_certificate_key_file_path) - -# make sure that key and certificate can be decoded in correct format -with open(test_private_key_file_path, "wb") as file: - decoded_private_key = base64.b64decode(ssl_private_key) - file.write(decoded_private_key) - -with open(test_certificate_key_file_path, "wb") as file: - decoded_ssl_certificate_key = base64.b64decode(ssl_certificate_key) - file.write(decoded_ssl_certificate_key) - - -# Make sure that certificate and key match each other -def get_md5_cert(file): - openssl_x509_command = ["openssl", "x509", "-noout", "-modulus", "-in", file] - openssl_md5_command = ["openssl", "md5"] - - x509_process = subprocess.Popen(openssl_x509_command, stdout=subprocess.PIPE) - md5_process = subprocess.Popen( - openssl_md5_command, stdin=x509_process.stdout, stdout=subprocess.PIPE - ) - - output, error = md5_process.communicate() - - if error: - print("error getting md5", error.decode("utf-8")) - - return output.decode("utf-8").strip() - - -def get_md5_private_key(file): - openssl_rsa_command = ["openssl", "rsa", "-noout", "-modulus", "-in", file] - openssl_md5_command = ["openssl", "md5"] - - rsa_process = subprocess.Popen(openssl_rsa_command, stdout=subprocess.PIPE) - md5_process = subprocess.Popen( - openssl_md5_command, stdin=rsa_process.stdout, stdout=subprocess.PIPE - ) - - output, error = md5_process.communicate() - - if error: - print(error.decode("utf-8")) - return output.decode("utf-8").strip() - - -md5_key = get_md5_private_key(test_private_key_file_path) -md5_cert = get_md5_cert(test_certificate_key_file_path) - -assert md5_key == md5_cert diff --git 
a/apps/schematic/api/default_config.yaml b/apps/schematic/api/default_config.yaml deleted file mode 100644 index 9c4c1c849..000000000 --- a/apps/schematic/api/default_config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -# If true, the synapse cache is purged before running a synapse storage related endpoint -purge_synapse_cache: true -# This can be set to a specific path, or to null to let the python client deicde -synapse_cache_path: '/var/tmp/synapse' diff --git a/apps/schematic/api/dhparam.pem b/apps/schematic/api/dhparam.pem deleted file mode 100644 index 71901dec8..000000000 --- a/apps/schematic/api/dhparam.pem +++ /dev/null @@ -1,13 +0,0 @@ ------BEGIN DH PARAMETERS----- -MIICCAKCAgEAwzcUARDhjqW74+OsC8quGIJhkuL60o9GHuR3feEOky1cY1D/Qm0I -s32FFsm5PutkJ6ZUFf86Uae+ARrQlsidsryHF+bHZpIBe12h/4pcQhH9ghdmHcq6 -lqJiD55Gb+uzqe/rmOCInEjl7WXxLvYDzndRbP9gY7I5tVQCdfMTYm7ZYK+Xt53X -ufIkwwkxj3qXimFIEeTJEqK837u7VM9Q5H+nZR+W/lAw8IvYp3wH3IrOmBflWBi/ -AaHCemd+gwaS8nZFrNSWffOd9Gg+tuFehlnCSMb4FudEbLk+AyvDAq1RMI1bH1SO -+go/i/iX3u616eXi83/U7JUMNj655Iyoc5F02GlDjyvRRauV50S4nIB7t/mxgGEZ -B7C1wce23PwhsRLxsT5xlti7T3QWgvO0w/P+jnCvwfyu9jUzP87qozAYAV8jCMzx -Henya27o3Qewhr6IuMm7tqo5Bz28AJMm+/DL+XQfF4ceP4XzA4OJVKfQzDKXh4PI -BSw8qY85esWJ3yQjwITygdOMHIxRVQA7Et7kKee3D3iDKeHdRu1m4hLtqRbHesrc -QMdhbZBW6WwYQPeqDhkYKBFbAFrrQHkNAOsduq6/OpWoOuu4yL4K4Sfkd8wQ/3At -e6lfZimHDEHlCIfKM3+MS91zkeOHlBVFVI+H2LiBk6tVcPm1hSK9c+sCAQI= ------END DH PARAMETERS----- diff --git a/apps/schematic/api/docker-compose.yml b/apps/schematic/api/docker-compose.yml deleted file mode 100644 index 560782494..000000000 --- a/apps/schematic/api/docker-compose.yml +++ /dev/null @@ -1,24 +0,0 @@ -version: '3.10' - -services: - schematic-api: - image: ghcr.io/sage-bionetworks/schematic-api:local - container_name: schematic-api - restart: always - env_file: - - .env - # volumes: - # - openchallenges-mariadb:/data/db - # - ./docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d - networks: - - schematic - ports: - - '7443:7443' - -# volumes: -# openchallenges-mariadb: -# name: openchallenges-mariadb - -networks: - schematic: - name: schematic diff --git a/apps/schematic/api/docker-entrypoint.sh b/apps/schematic/api/docker-entrypoint.sh deleted file mode 100644 index e031eda92..000000000 --- a/apps/schematic/api/docker-entrypoint.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -set -e - -if [ "$1" = 'uwsgi' ] || [ "$1" = 'python' ]; then - cd ${APP_DIR} - exec gosu www-data "$@" -fi - -exec "$@" \ No newline at end of file diff --git a/apps/schematic/api/openapitools.json b/apps/schematic/api/openapitools.json deleted file mode 100644 index 8c03a2517..000000000 --- a/apps/schematic/api/openapitools.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "$schema": "../../../node_modules/@openapitools/openapi-generator-cli/config.schema.json", - "spaces": 2, - "generator-cli": { - "version": "6.2.1", - "generators": { - "schematic-api": { - "config": "templates/config.yaml", - "generatorName": "python-flask", - "inputSpec": "#{cwd}/../../../libs/schematic/api-description/build/openapi.yaml", - "output": "#{cwd}", - "templateDir": "templates", - "additionalProperties": { - "packageName": "schematic_api", - "legacyDiscriminatorBehavior": false - } - } - } - } -} diff --git a/apps/schematic/api/poetry.lock b/apps/schematic/api/poetry.lock deleted file mode 100644 index 8b28d952f..000000000 --- a/apps/schematic/api/poetry.lock +++ /dev/null @@ -1,4737 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
- -[[package]] -name = "alabaster" -version = "1.0.0" -description = "A light, configurable Sphinx theme" -optional = false -python-versions = ">=3.10" -files = [ - {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, - {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, -] - -[[package]] -name = "altair" -version = "4.2.0" -description = "Altair: A declarative statistical visualization library for Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "altair-4.2.0-py3-none-any.whl", hash = "sha256:0c724848ae53410c13fa28be2b3b9a9dcb7b5caa1a70f7f217bd663bb419935a"}, - {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, -] - -[package.dependencies] -entrypoints = "*" -jinja2 = "*" -jsonschema = ">=3.0" -numpy = "*" -pandas = ">=0.18" -toolz = "*" - -[package.extras] -dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pytest", "recommonmark", "sphinx", "vega-datasets"] - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] 
-dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - -[[package]] -name = "astroid" -version = "3.2.4" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "asyncio-atexit" -version = "1.0.1" -description = "Like atexit, but for asyncio" -optional = false -python-versions = ">=3.6" -files = [ - {file = "asyncio-atexit-1.0.1.tar.gz", hash = "sha256:1d0c71544b8ee2c484d322844ee72c0875dde6f250c0ed5b6993592ab9f7d436"}, - {file = "asyncio_atexit-1.0.1-py3-none-any.whl", hash = "sha256:d93d5f7d5633a534abd521ce2896ed0fbe8de170bb1e65ec871d1c20eac9d376"}, -] - -[package.extras] -test = ["pytest", "uvloop"] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", 
"coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = 
"cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = 
"sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", 
hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-log" -version = "0.4.0" -description = "Logging integration for Click" -optional = false -python-versions = "*" -files = [ - {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, - {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, -] - -[package.dependencies] -click = "*" - -[[package]] -name = "clickclick" -version = "20.10.2" -description = "Click utility functions" -optional = false -python-versions = "*" -files = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] - -[package.dependencies] -click = ">=4.0" -PyYAML = ">=3.11" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "connexion" -version = "2.14.1" -description = "Connexion - API first applications with OpenAPI/Swagger and Flask" -optional = false -python-versions = ">=3.6" -files = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, -] - -[package.dependencies] -clickclick = ">=1.2,<21" -flask = ">=1.0.4,<3" -inflection = ">=0.3.1,<0.6" -itsdangerous = ">=0.24" -jsonschema = ">=2.5.1,<5" -packaging = ">=20" -PyYAML = ">=5.1,<7" -requests = ">=2.9.1,<3" -swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} -werkzeug = ">=1.0,<3" - -[package.extras] -aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"] -docs = ["sphinx-autoapi (==1.8.1)"] -flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"] -swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] -tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] - -[[package]] -name = "coverage" -version = "7.6.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = 
"coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "43.0.1" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." -optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - -[[package]] -name = "dateparser" -version = "1.2.0" -description = "Date parsing library designed to parse dates from HTML pages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, - {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, -] - -[package.dependencies] -python-dateutil = "*" -pytz = "*" -regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" -tzlocal = "*" - -[package.extras] -calendars = ["convertdate", "hijri-converter"] -fasttext = ["fasttext"] -langdetect = ["langdetect"] - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "deprecation" -version = "2.1.0" -description = "A library to handle automated deprecations" -optional = false -python-versions = "*" -files = [ - {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, - {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, -] - -[package.dependencies] -packaging = "*" - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "docutils" -version = "0.21.2" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.9" -files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, -] - -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] - -[[package]] -name = "environs" -version = "11.0.0" -description = "simplified environment variable parsing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "environs-11.0.0-py3-none-any.whl", hash = "sha256:e0bcfd41c718c07a7db422f9109e490746450da38793fe4ee197f397b9343435"}, - {file = "environs-11.0.0.tar.gz", hash = "sha256:069727a8f73d8ba8d033d3cd95c0da231d44f38f1da773bf076cef168d312ee8"}, -] - -[package.dependencies] -marshmallow = ">=3.13.0" -python-dotenv = "*" - -[package.extras] -dev = ["environs[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -tests = ["environs[django]", "pytest"] - -[[package]] -name = "et-xmlfile" -version = "1.1.0" -description = "An implementation of lxml.xmlfile for the standard library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "2.1.0" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.8" -files = [ - {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, - {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastjsonschema" -version = "2.20.0" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.16.0" -description = "A platform independent file lock." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] -typing = ["typing-extensions (>=4.12.2)"] - -[[package]] -name = "flask" -version = "2.2.5" -description = "A simple framework for building complex web applications." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, - {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"}, -] - -[package.dependencies] -click = ">=8.0" -itsdangerous = ">=2.0" -Jinja2 = ">=3.0" -Werkzeug = ">=2.2.2" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-cors" -version = "3.0.10" -description = "A Flask extension adding a decorator for CORS support" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, - {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, -] - -[package.dependencies] -Flask = ">=0.9" -Six = "*" - -[[package]] -name = "flask-testing" -version = "0.8.1" -description = "Unit testing for Flask" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Testing-0.8.1.tar.gz", hash = "sha256:0a734d7b68e63a9410b413cd7b1f96456f9a858bd09a6222d465650cc782eb01"}, -] - -[package.dependencies] -Flask = "*" - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "google-api-core" -version = "2.19.2" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_api_core-2.19.2-py3-none-any.whl", hash = "sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4"}, - {file = "google_api_core-2.19.2.tar.gz", hash = "sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status 
(>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-api-python-client" -version = "2.123.0" -description = "Google API Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-python-client-2.123.0.tar.gz", hash = "sha256:a17226b02f71de581afe045437b441844110a9cd91580b73549d41108cf1b9f0"}, - {file = "google_api_python_client-2.123.0-py2.py3-none-any.whl", hash = "sha256:1c2bcaa846acf5bac4d6f244d8373d4de9de73d64eb6e77b56767ab4cf681419"}, -] - -[package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" -google-auth = ">=1.19.0,<3.0.0.dev0" -google-auth-httplib2 = ">=0.1.0" -httplib2 = ">=0.15.0,<1.dev0" -uritemplate = ">=3.0.1,<5" - -[[package]] -name = "google-auth" -version = "2.34.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, - {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography", "pyopenssl"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-auth-httplib2" -version = "0.1.1" -description = "Google Authentication Library: httplib2 transport" -optional = false -python-versions = "*" -files = [ - {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, - {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, -] - -[package.dependencies] -google-auth = "*" -httplib2 = ">=0.19.0" - -[[package]] -name = "google-auth-oauthlib" -version = "0.8.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "google-auth-oauthlib-0.8.0.tar.gz", hash = "sha256:81056a310fb1c4a3e5a7e1a443e1eb96593c6bbc55b26c0261e4d3295d3e6593"}, - {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, -] - -[package.dependencies] -google-auth = ">=2.15.0" -requests-oauthlib = ">=0.7.0" - -[package.extras] -tool = ["click (>=6.0.0)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.65.0" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, -] - -[package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "graphviz" -version = "0.20.3" -description = "Simple Python interface for Graphviz" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "graphviz-0.20.3-py3-none-any.whl", hash = "sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5"}, - {file = "graphviz-0.20.3.zip", hash = "sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d"}, -] - -[package.extras] -dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"] -docs = ["sphinx (>=5,<7)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] -test = ["coverage", "pytest (>=7,<8.1)", "pytest-cov", "pytest-mock (>=3)"] - -[[package]] -name = "great-expectations" -version = "0.15.50" -description = "Always know what to expect from your data." -optional = false -python-versions = "*" -files = [ - {file = "great_expectations-0.15.50-py3-none-any.whl", hash = "sha256:bda4c6bfe199dc0610273a1c160aab3876583266b1957a34a7edb72b055fd13d"}, - {file = "great_expectations-0.15.50.tar.gz", hash = "sha256:0b00c974410d598a97b4c662d7955d80d6268e35c5f3893ddb546f75432412db"}, -] - -[package.dependencies] -altair = ">=4.0.0,<4.2.1" -Click = ">=7.1.2" -colorama = ">=0.4.3" -cryptography = ">=3.2" -importlib-metadata = ">=1.7.0" -Ipython = ">=7.16.3" -ipywidgets = ">=7.5.1" -jinja2 = ">=2.10" -jsonpatch = ">=1.22" -jsonschema = ">=2.5.1" -makefun = ">=1.7.0,<2" -marshmallow = ">=3.7.1,<4.0.0" -mistune = ">=0.8.4" -nbformat = ">=5.0" -notebook = ">=6.4.10" -numpy = {version = ">=1.23.0", markers = "python_version >= \"3.10\""} -packaging = "*" -pandas = {version = ">=1.3.0", markers = "python_version >= \"3.10\""} -pydantic = ">=1.10.4,<2.0" -pyparsing = ">=2.4" -python-dateutil = ">=2.8.1" -pytz = ">=2021.3" -requests = ">=2.20" -"ruamel.yaml" = ">=0.16,<0.17.18" -scipy = ">=0.19.0" -tqdm = ">=4.59.0" -typing-extensions = ">=3.10.0.0" -tzlocal = ">=1.2" -urllib3 = ">=1.25.4,<1.27" - -[package.extras] -arrow = ["feather-format (>=0.4.1)", "pyarrow"] -athena = ["pyathena (>=1.11)", "sqlalchemy (>=1.3.18,<2.0.0)"] -aws-secrets = ["boto3 (==1.17.106)"] -azure = ["azure-identity (>=1.10.0)", "azure-keyvault-secrets (>=4.0.0)", "azure-storage-blob (>=12.5.0)"] -azure-secrets = ["azure-identity (>=1.10.0)", "azure-keyvault-secrets (>=4.0.0)", "azure-storage-blob (>=12.5.0)"] -bigquery = ["gcsfs (>=0.5.1)", "google-cloud-bigquery (>=3.3.6)", "google-cloud-secret-manager (>=1.0.0)", "google-cloud-storage (>=1.28.0)", "sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-bigquery (>=1.3.0)"] -dev = ["PyHive (>=0.6.5)", "PyMySQL (>=0.9.3,<0.10)", "azure-identity (>=1.10.0)", "azure-keyvault-secrets (>=4.0.0)", "azure-storage-blob (>=12.5.0)", "black[jupyter] (==22.3.0)", "boto3 (==1.17.106)", "docstring-parser (==0.15)", "feather-format (>=0.4.1)", "flask (>=1.0.0)", "freezegun (>=0.3.15)", "gcsfs (>=0.5.1)", "google-cloud-bigquery (>=3.3.6)", "google-cloud-secret-manager (>=1.0.0)", "google-cloud-storage (>=1.28.0)", "invoke (>=2.0.0)", "ipykernel (<=6.17.1)", "mock-alchemy (>=0.2.5)", "moto (>=2.0.0,<3.0.0)", "mypy (==1.0.0)", "nbconvert (>=5)", "openpyxl (>=3.0.7)", "pre-commit (>=2.21.0)", "psycopg2-binary (>=2.7.6)", "pyarrow", "pyathena (>=1.11)", "pyfakefs (>=4.5.1)", "pyodbc (>=4.0.30)", "pypd (==1.1.0)", "pyspark (>=2.3.2)", "pytest (>=6.2.0)", "pytest-benchmark (>=3.4.1)", "pytest-cov (>=2.8.1)", "pytest-icdiff (>=0.6)", "pytest-mock (>=3.8.2)", "pytest-order (>=0.9.5)", "pytest-random-order (>=1.0.4)", "pytest-timeout (>=2.1.0)", "requirements-parser (>=0.2.0)", "ruff (==0.0.246)", "s3fs (>=0.5.1)", "snapshottest (==0.6.0)", "snowflake-connector-python (>=2.5.0)", "snowflake-sqlalchemy (>=1.2.3)", "sqlalchemy 
(>=1.3.18,<2.0.0)", "sqlalchemy-bigquery (>=1.3.0)", "sqlalchemy-dremio (==1.2.1)", "sqlalchemy-redshift (>=0.8.8)", "sqlalchemy-vertica-python (>=0.5.10)", "teradatasqlalchemy (==17.0.0.1)", "thrift (>=0.16.0)", "thrift-sasl (>=0.4.3)", "trino (>=0.310.0,!=0.316.0)", "xlrd (>=1.1.0,<2.0.0)"] -dremio = ["pyarrow", "pyodbc (>=4.0.30)", "sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-dremio (==1.2.1)"] -excel = ["openpyxl (>=3.0.7)", "xlrd (>=1.1.0,<2.0.0)"] -gcp = ["gcsfs (>=0.5.1)", "google-cloud-bigquery (>=3.3.6)", "google-cloud-secret-manager (>=1.0.0)", "google-cloud-storage (>=1.28.0)", "sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-bigquery (>=1.3.0)"] -hive = ["PyHive (>=0.6.5)", "thrift (>=0.16.0)", "thrift-sasl (>=0.4.3)"] -mssql = ["pyodbc (>=4.0.30)", "sqlalchemy (>=1.3.18,<2.0.0)"] -mysql = ["PyMySQL (>=0.9.3,<0.10)", "sqlalchemy (>=1.3.18,<2.0.0)"] -pagerduty = ["pypd (==1.1.0)"] -postgresql = ["psycopg2-binary (>=2.7.6)", "sqlalchemy (>=1.3.18,<2.0.0)"] -redshift = ["psycopg2-binary (>=2.7.6)", "sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-redshift (>=0.8.8)"] -s3 = ["boto3 (==1.17.106)"] -snowflake = ["snowflake-connector-python (>=2.5.0)", "snowflake-sqlalchemy (>=1.2.3)", "sqlalchemy (>=1.3.18,<2.0.0)"] -spark = ["pyspark (>=2.3.2)"] -sqlalchemy = ["sqlalchemy (>=1.3.18,<2.0.0)"] -teradata = ["sqlalchemy (>=1.3.18,<2.0.0)", "teradatasqlalchemy (==17.0.0.1)"] -test = ["black[jupyter] (==22.3.0)", "boto3 (==1.17.106)", "docstring-parser (==0.15)", "flask (>=1.0.0)", "freezegun (>=0.3.15)", "invoke (>=2.0.0)", "ipykernel (<=6.17.1)", "mock-alchemy (>=0.2.5)", "moto (>=2.0.0,<3.0.0)", "mypy (==1.0.0)", "nbconvert (>=5)", "pre-commit (>=2.21.0)", "pyfakefs (>=4.5.1)", "pytest (>=6.2.0)", "pytest-benchmark (>=3.4.1)", "pytest-cov (>=2.8.1)", "pytest-icdiff (>=0.6)", "pytest-mock (>=3.8.2)", "pytest-order (>=0.9.5)", "pytest-random-order (>=1.0.4)", "pytest-timeout (>=2.1.0)", "requirements-parser (>=0.2.0)", "ruff (==0.0.246)", "s3fs (>=0.5.1)", "snapshottest (==0.6.0)", "sqlalchemy (>=1.3.18,<2.0.0)"] -trino = ["sqlalchemy (>=1.3.18,<2.0.0)", "trino (>=0.310.0,!=0.316.0)"] -vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10)"] - -[[package]] -name = "greenlet" -version = "3.1.0" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, - {file = 
"greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, - {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, - {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, - {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, - {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, - {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, - {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, - {file = 
"greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, - {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, - {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, - {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, - {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, - {file = 
"greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, - {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, - {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, - {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, - {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, - {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, - {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httplib2" -version = "0.22.0" -description = "A comprehensive HTTP client library." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] - -[package.dependencies] -pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} - -[[package]] -name = "httpx" -version = "0.27.2" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "idna" -version = "3.8" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "6.11.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -optional = false -python-versions = ">=3.5" -files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple 
config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "interrogate" -version = "1.7.0" -description = "Interrogate a codebase for docstring coverage." -optional = false -python-versions = ">=3.8" -files = [ - {file = "interrogate-1.7.0-py3-none-any.whl", hash = "sha256:b13ff4dd8403369670e2efe684066de9fcb868ad9d7f2b4095d8112142dc9d12"}, - {file = "interrogate-1.7.0.tar.gz", hash = "sha256:a320d6ec644dfd887cc58247a345054fc4d9f981100c45184470068f4b3719b0"}, -] - -[package.dependencies] -attrs = "*" -click = ">=7.1" -colorama = "*" -py = "*" -tabulate = "*" -tomli = {version = "*", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["cairosvg", "coverage[toml]", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-autobuild", "wheel"] -docs = ["sphinx", "sphinx-autobuild"] -png = ["cairosvg"] -tests = ["coverage[toml]", "pytest", "pytest-cov", "pytest-mock"] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.27.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c"}, - {file = "ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", 
"ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "ipywidgets" -version = "8.1.5" -description = "Jupyter interactive widgets" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, - {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, -] - -[package.dependencies] -comm = ">=0.1.3" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.12,<3.1.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.12,<4.1.0" - -[package.extras] -test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." -optional = false -python-versions = ">=3.8" -files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, -] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." -optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, 
markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.10.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" -traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, -] - -[package.dependencies] -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.14.2" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, - {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = ">=5.0" -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -description = "A Jupyter Server Extension Providing Terminals." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, - {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "4.2.5" -description = "JupyterLab computational environment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, - {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, -] - -[package.dependencies] -async-lru = ">=1.0.0" -httpx = ">=0.25.0" -ipykernel = ">=6.5.0" -jinja2 = ">=3.0.3" -jupyter-core = "*" -jupyter-lsp = ">=2.0.0" -jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2" -packaging = "*" -setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -tornado = ">=6.2.0" -traitlets = "*" - -[package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", 
"matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] -test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] -upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.27.3" -description = "A set of server components for JupyterLab and JupyterLab like applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, - {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, -] - -[package.dependencies] -babel = ">=2.10" -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.18.0" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.31" - -[package.extras] -docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.13" -description = "Jupyter interactive widgets for JupyterLab" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, - {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, -] - -[[package]] -name = "makefun" -version = "1.15.4" -description = "Small library to dynamically create python functions." -optional = false -python-versions = "*" -files = [ - {file = "makefun-1.15.4-py2.py3-none-any.whl", hash = "sha256:945d078a7e01a903f2cbef738b33e0ebc52b8d35fb7e20c528ed87b5c80db5b7"}, - {file = "makefun-1.15.4.tar.gz", hash = "sha256:9f9b9904e7c397759374a88f4c57781fbab2a458dec78df4b3ee6272cd9fb010"}, -] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.22.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, - {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "mypy" -version = "1.11.2" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, - {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, - {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, - {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, - {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, - {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, - {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, - {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, - {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, - {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, - {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, - {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, - {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = 
"sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "2.8.8" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.8" -files = [ - {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, - {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, -] - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "notebook" -version = "7.2.2" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"}, - {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"}, -] - -[package.dependencies] -jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.2.0,<4.3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2,<0.3" -tornado = ">=6.2.0" - -[package.extras] -dev = ["hatch", "pre-commit"] -docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, -] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "oauth2client" -version = "4.1.3" -description = "OAuth 2.0 client library" -optional = false -python-versions = "*" -files = [ - {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, - {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"}, -] - -[package.dependencies] -httplib2 = ">=0.9.1" -pyasn1 = ">=0.1.7" -pyasn1-modules = ">=0.0.5" -rsa = ">=3.1.4" -six = ">=1.6.1" - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "openpyxl" -version = "3.1.5" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, - {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, -] - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "opentelemetry-api" -version = "1.21.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_api-1.21.0-py3-none-any.whl", hash = "sha256:4bb86b28627b7e41098f0e93280fe4892a1abed1b79a19aec6f928f39b17dffb"}, - {file = "opentelemetry_api-1.21.0.tar.gz", hash = "sha256:d6185fd5043e000075d921822fd2d26b953eba8ca21b1e2fa360dd46a7686316"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<7.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.21.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.21.0-py3-none-any.whl", hash = 
"sha256:97b1022b38270ec65d11fbfa348e0cd49d12006485c2321ea3b1b7037d42b6ec"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.21.0.tar.gz", hash = "sha256:61db274d8a68d636fb2ec2a0f281922949361cdd8236e25ff5539edf942b3226"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -opentelemetry-proto = "1.21.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.21.0" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.21.0-py3-none-any.whl", hash = "sha256:56837773de6fb2714c01fc4895caebe876f6397bbc4d16afddf89e1299a55ee2"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.21.0.tar.gz", hash = "sha256:19d60afa4ae8597f7ef61ad75c8b6c6b7ef8cb73a33fb4aed4dbc86d5c8d3301"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.21.0" -opentelemetry-proto = "1.21.0" -opentelemetry-sdk = ">=1.21.0,<1.22.0" -requests = ">=2.7,<3.0" - -[package.extras] -test = ["responses (==0.22.0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.21.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_proto-1.21.0-py3-none-any.whl", hash = "sha256:32fc4248e83eebd80994e13963e683f25f3b443226336bb12b5b6d53638f50ba"}, - {file = "opentelemetry_proto-1.21.0.tar.gz", hash = "sha256:7d5172c29ed1b525b5ecf4ebe758c7138a9224441b3cfe683d0a237c33b1941f"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.21.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_sdk-1.21.0-py3-none-any.whl", hash = "sha256:9fe633243a8c655fedace3a0b89ccdfc654c0290ea2d8e839bd5db3131186f73"}, - {file = "opentelemetry_sdk-1.21.0.tar.gz", hash = "sha256:3ec8cd3020328d6bc5c9991ccaf9ae820ccb6395a5648d9a95d3ec88275b8879"}, -] - -[package.dependencies] -opentelemetry-api = "1.21.0" -opentelemetry-semantic-conventions = "0.42b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.42b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_semantic_conventions-0.42b0-py3-none-any.whl", hash = "sha256:5cd719cbfec448af658860796c5d0fcea2fdf0945a2bed2363f42cb1ee39f526"}, - {file = "opentelemetry_semantic_conventions-0.42b0.tar.gz", hash = "sha256:44ae67a0a3252a05072877857e5cc1242c98d4cf12870159f1a94bec800d38ec"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandarallel" -version = "1.6.5" -description = "An easy to use library to speed up computation (by parallelizing on multi CPUs) with pandas." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pandarallel-1.6.5.tar.gz", hash = "sha256:1c2df98ff6441e8ae13ff428ceebaa7ec42d731f7f972c41ce4fdef1d3adf640"}, -] - -[package.dependencies] -dill = ">=0.3.1" -pandas = ">=1" -psutil = "*" - -[package.extras] -dev = ["numpy", "pytest", "pytest-cov"] -doc = ["mkdocs-material"] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = 
"pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = {version = ">=1.22.4", markers = "python_version < \"3.11\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", 
"python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pdoc" -version = "12.3.1" -description = "API Documentation for Python Projects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pdoc-12.3.1-py3-none-any.whl", hash = "sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"}, - {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"}, -] - -[package.dependencies] -Jinja2 = ">=2.11.0" -MarkupSafe = "*" -pygments = ">=2.12.0" - -[package.extras] -dev = ["black", "hypothesis", "mypy", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "platformdirs" -version = "4.3.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, - {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" - -[package.extras] -testing = ["google-api-core (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.4" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, -] - -[[package]] -name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false 
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pyasn1" -version = "0.6.1" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, - {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.1" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, - {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "1.10.18" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = 
"pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pygsheets" -version = "2.0.6" -description = "Google Spreadsheets Python API v4" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pygsheets-2.0.6-py3-none-any.whl", hash = "sha256:3338c2eb8990fdee9f463b42a370ec0870c118d607d775471a6dfb8b08f6cd87"}, - {file = "pygsheets-2.0.6.tar.gz", hash = "sha256:bff46c812e99f9b8b81a09b456581365281c797620ec08530b0d0e48fa9299e2"}, -] - -[package.dependencies] -google-api-python-client = ">=2.50.0" -google-auth-oauthlib = ">=0.7.1" - -[package.extras] -pandas = ["pandas (>=0.14.0)"] - -[[package]] -name = "pylint" -version = "3.2.7" -description = "python code static checker" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, - {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, -] - -[package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = {version = ">=0.2", markers = "python_version < \"3.11\""} -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" - -[package.extras] -spelling = ["pyenchant 
(>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pyparsing" -version = "3.1.4" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyproject-api" -version = "1.7.1" -description = "API to interact with the python pyproject.toml based projects" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, - {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, -] - -[package.dependencies] -packaging = ">=24.1" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] -testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "4.0.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-randomly" -version = "3.12.0" -description = "Pytest plugin to randomly order tests and control random.seed." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-randomly-3.12.0.tar.gz", hash = "sha256:d60c2db71ac319aee0fc6c4110a7597d611a8b94a5590918bfa8583f00caccb2"}, - {file = "pytest_randomly-3.12.0-py3-none-any.whl", hash = "sha256:f4f2e803daf5d1ba036cc22bf4fe9dbbf99389ec56b00e5cba732fb5c1d07fdd"}, -] - -[package.dependencies] -pytest = "*" - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = 
"sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pywinpty" -version = "2.0.13" -description = "Pseudo terminal support for Windows from Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = 
"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = 
"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.2.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, - {file = 
"pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, - {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, - {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, - {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, - {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, - 
{file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, - {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, - {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, - {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, - {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, - {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, - {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, - {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, - {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, - {file = 
"pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, - {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, - {file = 
"pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, - {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "rdflib" -version = "6.3.2" -description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, -] - -[package.dependencies] -isodate = ">=0.6.0,<0.7.0" -pyparsing = ">=2.1.0,<4" - -[package.extras] -berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] -html = ["html5lib (>=1.0,<2.0)"] -lxml = ["lxml (>=4.3.0,<5.0.0)"] -networkx = ["networkx (>=2.0.0,<3.0.0)"] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2024.9.11" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, - {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, - {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, - {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, - {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, - {file = 
"regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, - {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, - {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, - {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, - {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, - {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, - {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, - {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, - {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = 
"rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruamel-yaml" -version = "0.17.17" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3" -files = [ - {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, - {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, -] - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "schematic-db" -version = "0.0.41" -description = "" -optional = false -python-versions = ">=3.9,<4.0" -files = [ - {file = "schematic_db-0.0.41-py3-none-any.whl", hash = "sha256:bf8e8a73fb06113431a89a25df15f3eefbe7b40c2cfe149c4e9afa6e6b33fd5b"}, - {file = "schematic_db-0.0.41.tar.gz", hash = "sha256:cd5ec936cdb4fca203de57aa0c771b2b251c5eec7e0af719c388cad70d8d9f6d"}, -] - -[package.dependencies] -deprecation = ">=2.1.0,<3.0.0" -interrogate = ">=1.5.0,<2.0.0" -networkx = ">=2.8.6,<3.0.0" -pandas = ">=2.0.0,<3.0.0" -pydantic = ">=1.10.7,<2.0.0" -PyYAML = ">=6.0,<7.0" -requests = ">=2.28.1,<3.0.0" -SQLAlchemy = ">=2.0.19,<3.0.0" -SQLAlchemy-Utils = ">=0.41.1,<0.42.0" -synapseclient = {version = ">=4.0.0,<5.0.0", optional = true, markers = "extra == \"synapse\""} -tenacity = ">=8.1.0,<9.0.0" -validators = ">=0.20.0,<0.21.0" - 
-[package.extras] -mysql = ["mysqlclient (>=2.1.1,<3.0.0)"] -postgres = ["psycopg2-binary (>=2.9.5,<3.0.0)"] -synapse = ["synapseclient (>=4.0.0,<5.0.0)"] - -[[package]] -name = "schematicpy" -version = "24.7.2" -description = "Package for biomedical data model and metadata ingress management" -optional = false -python-versions = "<3.11,>=3.9.0" -files = [ - {file = "schematicpy-24.7.2-py3-none-any.whl", hash = "sha256:e208fd4d694808f29c775facc6772dc5185751e546813ee5d211794c5a0bd253"}, - {file = "schematicpy-24.7.2.tar.gz", hash = "sha256:04113ddc43893a9552eae995edb0837eca8e26c09d68c8183cde413a04db46fc"}, -] - -[package.dependencies] -click = ">=8.0.0,<9.0.0" -click-log = ">=0.4.0,<0.5.0" -dataclasses-json = ">=0.6.1,<0.7.0" -dateparser = ">=1.1.4,<2.0.0" -google-api-python-client = ">=2.0.0,<3.0.0" -google-auth-httplib2 = ">=0.1.0,<0.2.0" -google-auth-oauthlib = ">=0.8.0,<0.9.0" -graphviz = ">=0.20.0,<0.21.0" -great-expectations = ">=0.15.0,<0.16.0" -inflection = ">=0.5.1,<0.6.0" -itsdangerous = ">=2.0.0,<3.0.0" -jsonschema = ">=4.0.0,<5.0.0" -networkx = ">=2.2.8" -numpy = ">=1.26.4,<2.0.0" -oauth2client = ">=4.1.0,<5.0.0" -openpyxl = ">=3.0.9,<4.0.0" -pandarallel = ">=1.6.4,<2.0.0" -pandas = ">=2.2.2,<3.0.0" -pdoc = ">=12.2.0,<13.0.0" -pydantic = ">=1.10.4,<2.0.0" -pygsheets = ">=2.0.4,<3.0.0" -PyYAML = ">=6.0.0,<7.0.0" -rdflib = ">=6.0.0,<7.0.0" -schematic-db = {version = "0.0.41", extras = ["synapse"]} -setuptools = ">=66.0.0,<67.0.0" -sphinx-click = ">=4.0.0,<5.0.0" -synapseclient = "4.3.1" -tenacity = ">=8.0.1,<9.0.0" -toml = ">=0.10.2,<0.11.0" - -[package.extras] -api = ["Flask (==2.1.3)", "Flask-Cors (>=3.0.10,<4.0.0)", "Jinja2 (>2.11.3)", "connexion[swagger-ui] (>=2.8.0,<3.0.0)", "flask-opentracing (>=2.0.0,<3.0.0)", "jaeger-client (>=4.8.0,<5.0.0)", "pyopenssl (>=23.0.0,<24.0.0)"] -aws = ["uWSGI (>=2.0.21,<3.0.0)"] - -[[package]] -name = "scipy" -version = "1.14.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, - {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, - {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, - {file = 
"scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, - {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, - {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, - {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, - {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, - {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, - {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, - {file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, -] - -[package.dependencies] -numpy = ">=1.23.5,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", 
"pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "setuptools" -version = "66.1.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"}, - {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package 
provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." -optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "sphinx" -version = "8.0.2" -description = "Python documentation generator" -optional = false -python-versions = ">=3.10" -files = [ - {file = "sphinx-8.0.2-py3-none-any.whl", hash = "sha256:56173572ae6c1b9a38911786e206a110c9749116745873feae4f9ce88e59391d"}, - {file = "sphinx-8.0.2.tar.gz", hash = "sha256:0cce1ddcc4fd3532cf1dd283bc7d886758362c5c1de6598696579ce96d8ffa5b"}, -] - -[package.dependencies] -alabaster = ">=0.7.14" -babel = ">=2.13" -colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} -docutils = ">=0.20,<0.22" -imagesize = ">=1.3" -Jinja2 = ">=3.1" -packaging = ">=23.0" -Pygments = ">=2.17" -requests = ">=2.30.0" -snowballstemmer = ">=2.2" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.9" -tomli = {version = ">=2", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=6.0)", "mypy (==1.11.0)", "pytest (>=6.0)", "ruff (==0.5.5)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-Pillow (==10.2.0.20240520)", "types-Pygments (==2.18.0.20240506)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20240724)", "types-requests (>=2.30.0)"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] - -[[package]] -name = "sphinx-click" -version = "4.4.0" -description = "Sphinx extension that automatically documents click applications" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, - {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, -] - -[package.dependencies] -click = ">=7.0" -docutils = "*" -sphinx = ">=2.0" - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, - {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -description = "sphinxcontrib-devhelp 
is a sphinx extension which outputs Devhelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, - {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, - {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, - {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["defusedxml (>=0.7.1)", "pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, - {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sqlalchemy" -version = "2.0.34" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, - {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, - {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", 
markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-utils" -version = "0.41.2" -description = "Various utility functions for SQLAlchemy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, - {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, -] - -[package.dependencies] -SQLAlchemy = ">=1.3" - -[package.extras] -arrow = ["arrow (>=0.3.4)"] -babel = ["Babel (>=1.3)"] -color = ["colour (>=0.0.4)"] -encrypted = ["cryptography (>=0.6)"] -intervals = ["intervals (>=0.7.1)"] -password = ["passlib (>=1.6,<2.0)"] -pendulum = ["pendulum (>=2.0.5)"] -phone = ["phonenumbers (>=5.9.2)"] -test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -timezone = ["python-dateutil"] -url = ["furl (>=0.4.1)"] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = 
">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "swagger-ui-bundle" -version = "0.0.9" -description = "swagger_ui_bundle - swagger-ui files in a pip package" -optional = false -python-versions = "*" -files = [ - {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, - {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, -] - -[package.dependencies] -Jinja2 = ">=2.0" - -[[package]] -name = "synapseclient" -version = "4.3.1" -description = "A client for Synapse, a collaborative, open-source research platform that allows teams to share data, track analyses, and collaborate." -optional = false -python-versions = ">=3.8" -files = [ - {file = "synapseclient-4.3.1-py3-none-any.whl", hash = "sha256:515fff80092c4acee010e272ae313533ae31f7cbe0a590f540f98fd10a18177b"}, - {file = "synapseclient-4.3.1.tar.gz", hash = "sha256:9d1c2cd1d6fe4fabb386290c0eed20944ab7e44e6713db40f19cf28babe3be3c"}, -] - -[package.dependencies] -async-lru = ">=2.0.4,<2.1.0" -asyncio-atexit = ">=1.0.1,<1.1.0" -deprecated = ">=1.2.4,<2.0" -httpx = ">=0.27.0,<0.28.0" -nest-asyncio = ">=1.6.0,<1.7.0" -opentelemetry-api = ">=1.21.0,<1.22.0" -opentelemetry-exporter-otlp-proto-http = ">=1.21.0,<1.22.0" -opentelemetry-sdk = ">=1.21.0,<1.22.0" -psutil = ">=5.9.8,<5.10.0" -requests = ">=2.22.0,<3.0" -tqdm = ">=4.66.2,<5.0" -urllib3 = ">=1.26.18,<2" - -[package.extras] -boto3 = ["boto3 (>=1.7.0,<2.0)"] -dev = ["black", "flake8 (>=3.7.0,<4.0)", "func-timeout (>=4.3,<5.0)", "pandas (>=1.5,<3.0)", "pre-commit", "pytest (>=7.0.0,<8.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-cov (>=4.1.0,<4.2.0)", "pytest-mock (>=3.0,<4.0)", "pytest-rerunfailures (>=12.0,<13.0)", "pytest-socket (>=0.6.0,<0.7.0)", "pytest-xdist[psutil] (>=2.2,<3.0.0)"] -docs = ["markdown-include (>=0.8.1,<0.9.0)", "mkdocs (>=1.5.3)", "mkdocs-material (>=9.4.14)", "mkdocs-open-in-new-tab (>=1.0.3,<1.1.0)", "mkdocstrings (>=0.24.0)", "mkdocstrings-python (>=1.7.5)", "termynal (>=0.11.1)"] -pandas = ["pandas (>=1.5,<3.0)"] -pysftp = ["pysftp (>=0.2.8,<0.3)"] -tests = ["flake8 (>=3.7.0,<4.0)", "func-timeout (>=4.3,<5.0)", "pandas (>=1.5,<3.0)", "pytest (>=7.0.0,<8.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-cov (>=4.1.0,<4.2.0)", "pytest-mock (>=3.0,<4.0)", "pytest-rerunfailures (>=12.0,<13.0)", "pytest-socket (>=0.6.0,<0.7.0)", "pytest-xdist[psutil] (>=2.2,<3.0.0)"] - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "tenacity" -version = "8.5.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "terminado" 
-version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.13.2" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, -] - -[[package]] -name = "toolz" -version = "0.12.1" -description = "List processing tools and functional utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, -] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tox" -version = "4.0.18" -description = "tox is a generic virtualenv management and test command line tool" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tox-4.0.18-py3-none-any.whl", hash = "sha256:2e3fa6d1b5cc09a971848e36d33bea64120dba9009cad5bff1fa8f6a5f367373"}, - {file = "tox-4.0.18.tar.gz", hash = "sha256:e862abf5b75aa2d84e087a2f9696b97faf01dff8836515216f0453e951745eee"}, -] - -[package.dependencies] -cachetools = ">=5.2" -chardet = ">=5.1" -colorama = ">=0.4.6" -filelock = ">=3.8.2" -packaging = ">=22" -platformdirs = ">=2.6" -pluggy = ">=1" -pyproject-api = ">=1.2.1" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.17.1" - -[package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-argparse-cli (>=1.10)", "sphinx-autodoc-typehints (>=1.19.5)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.9)", "covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.3)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.11.1)", "psutil (>=5.9.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.8.2)"] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = 
"platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240906" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, - {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -optional = false -python-versions = ">=3.6" -files = [ - {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, - {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, -] - -[[package]] -name = "urllib3" -version = "1.26.20" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, - {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, -] - -[package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "uwsgi" -version = "2.0.26" -description = "The uWSGI server" -optional = false -python-versions = "*" -files = [ - {file = "uwsgi-2.0.26.tar.gz", hash = "sha256:86e6bfcd4dc20529665f5b7777193cdc48622fb2c59f0a7f1e3dc32b3882e7f9"}, -] - -[[package]] -name = "validators" -version = "0.20.0" -description = "Python Data Validation for Humans™." -optional = false -python-versions = ">=3.4" -files = [ - {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, -] - -[package.dependencies] -decorator = ">=3.4.0" - -[package.extras] -test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] - -[[package]] -name = "virtualenv" -version = "20.26.4" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, - {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "24.8.0" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "werkzeug" -version = "2.3.8" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "werkzeug-2.3.8-py3-none-any.whl", hash = "sha256:bba1f19f8ec89d4d607a3bd62f1904bd2e609472d93cd85e9d4e178f472c3748"}, - {file = "werkzeug-2.3.8.tar.gz", hash = "sha256:554b257c74bbeb7a0d254160a4f8ffe185243f52a52035060b761ca62d977f03"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.13" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, -] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "zipp" -version = "3.20.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[metadata] -lock-version = "2.0" -python-versions = "3.10.14" -content-hash = "284a30cfac84ab6b47c3b7b6139f453de291db8a216134a2f18fc584f85b9a4a" diff --git a/apps/schematic/api/poetry.toml b/apps/schematic/api/poetry.toml deleted file mode 100644 index 62e2dff2a..000000000 --- a/apps/schematic/api/poetry.toml +++ /dev/null @@ -1,3 +0,0 @@ -[virtualenvs] -in-project = true -create = true diff --git a/apps/schematic/api/prepare-python.sh b/apps/schematic/api/prepare-python.sh deleted file mode 100755 index c33d63a2c..000000000 --- a/apps/schematic/api/prepare-python.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -PYTHON_VERSION="3.10.14" - -pyenv install --skip-existing $PYTHON_VERSION - -# Initializing pyenv again solves an issue encountered by GitHub action where the version of Python -# installed above is not detected. 
-eval "$(pyenv init -)" - -pyenv local $PYTHON_VERSION -poetry env use $PYTHON_VERSION -poetry install --with prod,dev \ No newline at end of file diff --git a/apps/schematic/api/prepare_key_certificate.py b/apps/schematic/api/prepare_key_certificate.py deleted file mode 100644 index 316a4e53c..000000000 --- a/apps/schematic/api/prepare_key_certificate.py +++ /dev/null @@ -1,60 +0,0 @@ -import base64 -import json -import subprocess - - -# Define the paths to your SSL certificate and key -cert_file_path = "private_localhost_certificate.crt" -key_file_path = "private_localhost.key" -env_file_path = ".env" # Path to your .env file - -# Define the OpenSSL command -openssl_command = [ - "openssl", - "req", - "-x509", - "-nodes", - "-days", - "365", - "-subj", - "/C=US/ST=WA/O=SAGE", - "-newkey", - "rsa:2048", - "-keyout", - key_file_path, - "-out", - cert_file_path, -] - -# Run the OpenSSL command -try: - subprocess.run(openssl_command, check=True) - print("SSL certificate and key generated successfully.") -except subprocess.CalledProcessError as e: - print(f"Error generating SSL certificate and key: {e}") - - -# Function to read a file and encode its contents to Base64 -# Certificate has to be in base64 format otherwise can't be parse properly as environment variables -def encode_file_to_base64(file_path): - with open(file_path, "rb") as file: - return base64.b64encode(file.read()).decode("utf-8") - - -# Encode the SSL certificate and key -ssl_certificate_base64 = encode_file_to_base64(cert_file_path) -ssl_private_key_base64 = encode_file_to_base64(key_file_path) - -# Combine into a JSON object -ssl_config_json = json.dumps( - { - "SSL_CERTIFICATE": ssl_certificate_base64, - "SSL_PRIVATE_KEY": ssl_private_key_base64, - } -) - -# Append to .env file -with open(env_file_path, "a") as env_file: - env_file.write(f"SECRETS_MANAGER_SECRETS={ssl_config_json}\n") - -print("SSL certificate and key have been encoded and appended to the .env file.") diff --git a/apps/schematic/api/project.json b/apps/schematic/api/project.json deleted file mode 100644 index 38351a538..000000000 --- a/apps/schematic/api/project.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "name": "schematic-api", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "apps/schematic/api/src", - "projectType": "application", - "targets": { - "create-config": { - "executor": "nx:run-commands", - "options": { - "command": "cp -n .env.example .env", - "cwd": "{projectRoot}" - } - }, - "prepare": { - "executor": "nx:run-commands", - "options": { - "command": "./prepare-python.sh", - "cwd": "{projectRoot}" - } - }, - "serve": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run python -m schematic_api", - "cwd": "apps/schematic/api" - } - }, - "serve-detach": { - "executor": "nx:run-commands", - "options": { - "command": "docker/schematic/serve-detach.sh schematic-api" - } - }, - "build-image": { - "executor": "@nx-tools/nx-container:build", - "options": { - "context": "apps/schematic/api", - "metadata": { - "images": ["ghcr.io/sage-bionetworks/schematic-api"], - "tags": ["type=edge,branch=main", "type=raw,value=local", "type=sha"] - }, - "push": false - } - }, - "scan-image": { - "executor": "nx:run-commands", - "options": { - "command": "trivy image ghcr.io/sage-bionetworks/schematic-api:local --quiet", - "color": true - } - }, - "generate": { - "executor": "nx:run-commands", - "options": { - "commands": ["xargs rm -fr <.openapi-generator/FILES", "openapi-generator-cli generate"], - "cwd": 
"{projectRoot}", - "parallel": false - }, - "dependsOn": ["^build"] - }, - "lint": { - "executor": "nx:run-commands", - "options": { - "commands": [ - "poetry run mypy --non-interactive --install-types --disallow-untyped-defs schematic_api/controllers/*impl.py schematic_api/test/test*.py", - "poetry run pylint schematic_api/controllers/*impl.py schematic_api/test/test*.py" - ], - "cwd": "apps/schematic/api" - } - }, - "mypy": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run mypy --non-interactive --install-types --disallow-untyped-defs schematic_api/controllers/*impl.py schematic_api/test/test*.py", - "cwd": "apps/schematic/api" - } - }, - "pylint": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run pylint schematic_api/controllers/*impl.py schematic_api/test/test*.py", - "cwd": "apps/schematic/api" - } - }, - "test": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run pytest -m 'not secrets'", - "cwd": "apps/schematic/api" - } - }, - "test-integration": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run pytest -m 'secrets'", - "cwd": "apps/schematic/api" - } - }, - "test-all": { - "executor": "nx:run-commands", - "options": { - "command": "poetry run pytest", - "cwd": "apps/schematic/api" - } - }, - "sonar": { - "executor": "nx:run-commands", - "options": { - "command": "bash $WORKSPACE_DIR/tools/sonar-scanner.sh --project-key {projectName} --project-dir .", - "cwd": "{projectRoot}" - } - } - }, - "tags": ["type:service", "scope:backend", "language:python", "package-manager:poetry"], - "implicitDependencies": ["schematic-api-description"] -} diff --git a/apps/schematic/api/pyproject.toml b/apps/schematic/api/pyproject.toml deleted file mode 100644 index df2783e12..000000000 --- a/apps/schematic/api/pyproject.toml +++ /dev/null @@ -1,37 +0,0 @@ -[tool.poetry] -name = "schematic-api" -version = "0.1.0" -description = "Schematic REST API" -authors = ["Thomas Schaffter "] -readme = "README.md" -packages = [{include = "schematic_api"}] - -[tool.poetry.dependencies] -python = "3.10.14" -connexion = {version = "2.14.1", extras = ["swagger-ui"]} -swagger-ui-bundle = "0.0.9" -python-dateutil = "2.8.2" -Flask = "2.2.5" -Flask-Cors = "3.0.10" -schematicpy = "24.7.2" -environs = "^11.0.0" -uwsgi = "^2.0.22" - -[tool.poetry.group.dev.dependencies] -mypy = "^1.4.1" -pylint = "3.2.7" - - -[tool.poetry.group.prod.dependencies] -uWSGI = "^2.0.22" - -[tool.poetry.group.test.dependencies] -pytest = "7.2.0" -pytest-cov = "4.0.0" -pytest-randomly = "3.12.0" -Flask-Testing = "0.8.1" -tox = "4.0.18" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/apps/schematic/api/pytest.ini b/apps/schematic/api/pytest.ini deleted file mode 100644 index a3cfeabd4..000000000 --- a/apps/schematic/api/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -markers = - synapse: tests that interact with synapse - secrets: tests that require secrets \ No newline at end of file diff --git a/apps/schematic/api/redirect.conf b/apps/schematic/api/redirect.conf deleted file mode 100644 index 5a0871b3c..000000000 --- a/apps/schematic/api/redirect.conf +++ /dev/null @@ -1,11 +0,0 @@ -server { - # listen to port 443 for https requests - listen 443 ssl http2 default_server; - listen [::]:443 ssl http2 default_server; - include /etc/nginx/conf.d/self-signed.conf; - include /etc/nginx/conf.d/ssl-params.conf; - server_name 127.0.0.1; - - # Redirect the browser to port 7443 - return 301 
https://$server_name:7443$request_uri; -} \ No newline at end of file diff --git a/apps/schematic/api/save_key_certificate.py b/apps/schematic/api/save_key_certificate.py deleted file mode 100644 index 1b8880936..000000000 --- a/apps/schematic/api/save_key_certificate.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import base64 -import json - -secret_manager_secrets = os.environ["SECRETS_MANAGER_SECRETS"] - -ssl_private_key = json.loads(secret_manager_secrets)["SSL_PRIVATE_KEY"] -ssl_certificate_key = json.loads(secret_manager_secrets)["SSL_CERTIFICATE"] - -# save the key and certificate as files -test_private_key_file_path = "/etc/ssl/private/localhost.key" -test_certificate_key_file_path = "/etc/ssl/certs/localhost.crt" - -with open(test_private_key_file_path, "wb") as file: - decoded_private_key = base64.b64decode(ssl_private_key) - file.write(decoded_private_key) - -with open(test_certificate_key_file_path, "wb") as file: - decoded_ssl_certificate_key = base64.b64decode(ssl_certificate_key) - file.write(decoded_ssl_certificate_key) diff --git a/apps/schematic/api/schematic_api/__init__.py b/apps/schematic/api/schematic_api/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/schematic/api/schematic_api/__main__.py b/apps/schematic/api/schematic_api/__main__.py deleted file mode 100644 index 48ce93b55..000000000 --- a/apps/schematic/api/schematic_api/__main__.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python3 -import connexion -from flask_cors import CORS -import flask -from schematic_api import encoder - -app = connexion.App(__name__, specification_dir="./openapi/") -app.app.json_encoder = encoder.JSONEncoder -app.add_api( - "openapi.yaml", arguments={"title": "Schematic REST API"}, pythonic_params=True -) -app.add_url_rule("/", "ui", lambda: flask.redirect("/api/v1/ui")) - -# add CORS support -# https://connexion.readthedocs.io/en/latest/cookbook.html#cors-support -CORS(app.app, resources={r"/api/*": {"origins": "*"}}) - - -def main(): - app.run(port=7443, debug=False) - - -if __name__ == "__main__": - main() diff --git a/apps/schematic/api/schematic_api/controllers/__init__.py b/apps/schematic/api/schematic_api/controllers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/schematic/api/schematic_api/controllers/manifest_generation_controller.py b/apps/schematic/api/schematic_api/controllers/manifest_generation_controller.py deleted file mode 100644 index f88ee09e9..000000000 --- a/apps/schematic/api/schematic_api/controllers/manifest_generation_controller.py +++ /dev/null @@ -1,100 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api.models.google_sheet_links import GoogleSheetLinks # noqa: E501 -from schematic_api import util -from schematic_api.controllers import manifest_generation_controller_impl - - -def generate_excel_manifest( - schema_url, - data_type, - add_annotations=None, - manifest_title=None, - display_label_type=None, - dataset_id=None, - asset_view_id=None, -): # noqa: E501 - """Generates an excel file - - Generates an excel file # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param data_type: A data type - :type data_type: str - :param add_annotations: If true, annotations are added to the manifest - :type add_annotations: bool - :param manifest_title: If making one manifest, the title of the 
manifest. If making multiple manifests, the prefix of the title of the manifests. - :type manifest_title: str - :param display_label_type: The type of label to display - :type display_label_type: str - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - - :rtype: Union[file, Tuple[file, int], Tuple[file, int, Dict[str, str]] - """ - return manifest_generation_controller_impl.generate_excel_manifest( - schema_url, - data_type, - add_annotations, - manifest_title, - display_label_type, - dataset_id, - asset_view_id, - ) - - -def generate_google_sheet_manifests( - schema_url, - add_annotations=None, - manifest_title=None, - display_label_type=None, - use_strict_validation=None, - dataset_id_array=None, - data_type_array=None, - asset_view_id=None, - generate_all_manifests=None, -): # noqa: E501 - """Generates a list of google sheet links - - Generates a list of google sheet links # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param add_annotations: If true, annotations are added to the manifest - :type add_annotations: bool - :param manifest_title: If making one manifest, the title of the manifest. If making multiple manifests, the prefix of the title of the manifests. - :type manifest_title: str - :param display_label_type: The type of label to display - :type display_label_type: str - :param use_strict_validation: If true, users are blocked from entering incorrect values. If false, users will get a warning when using incorrect values. - :type use_strict_validation: bool - :param dataset_id_array: An array of dataset ids - :type dataset_id_array: List[str] - :param data_type_array: An array of data types - :type data_type_array: List[str] - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param generate_all_manifests: If true, a manifest for all components will be generated, datasetIds will be ignored. If false, manifests for each id in datasetIds will be generated. 
- :type generate_all_manifests: bool - - :rtype: Union[GoogleSheetLinks, Tuple[GoogleSheetLinks, int], Tuple[GoogleSheetLinks, int, Dict[str, str]] - """ - return manifest_generation_controller_impl.generate_google_sheet_manifests( - schema_url, - add_annotations, - manifest_title, - display_label_type, - use_strict_validation, - dataset_id_array, - data_type_array, - asset_view_id, - generate_all_manifests, - ) diff --git a/apps/schematic/api/schematic_api/controllers/manifest_generation_controller_impl.py b/apps/schematic/api/schematic_api/controllers/manifest_generation_controller_impl.py deleted file mode 100644 index c3b0d4a18..000000000 --- a/apps/schematic/api/schematic_api/controllers/manifest_generation_controller_impl.py +++ /dev/null @@ -1,185 +0,0 @@ -"""Manifest generation functions""" - -# pylint: disable=too-many-arguments -import os - -from schematic import CONFIG # type: ignore -from schematic.manifest.generator import ManifestGenerator # type: ignore -from schematic.utils.schema_utils import DisplayLabelType # type: ignore -from flask import send_from_directory, Response - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.google_sheet_links import GoogleSheetLinks -from schematic_api.controllers.utils import ( - handle_exceptions, - get_access_token, - download_schema_file_as_jsonld, -) - - -@handle_exceptions -def generate_excel_manifest_file( - schema_url: str, - dataset_id: str | None, - add_annotations: bool, - manifest_title: str | None, - data_type: str | None, - display_label_type: DisplayLabelType, - asset_view_id: str | None, -) -> tuple[str | BasicError, int]: - """Creates an excel version of the manifest and returns the path - - Args: - schema_url (str): The URL of the schema - dataset_id (str | None): Use this to get the existing manifest in the - dataset - add_annotations (bool): Whether or not annotations get added to the manifest - manifest_title (str | None): Title of the manifest - data_type (str | None): The datatype of the manifest to generate - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - asset_view_id (str | None): ID of the asset view - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is the path to the excel file or an error object - The second item is the response status - """ - if asset_view_id: - CONFIG.synapse_master_fileview_id = asset_view_id - - access_token = get_access_token() - path_list = ManifestGenerator.create_manifests( - path_to_data_model=schema_url, - output_format="excel", - data_types=[data_type], - title=manifest_title, - access_token=access_token, - dataset_ids=[dataset_id], - use_annotations=add_annotations, - data_model_labels=display_label_type, - ) - assert len(path_list) == 1 - path = path_list[0] - assert isinstance(path, str) - return path, 200 - - -def generate_excel_manifest( - schema_url: str, - data_type: str | None, - add_annotations: bool, - manifest_title: str | None, - display_label_type: DisplayLabelType, - dataset_id: str | None, - asset_view_id: str | None, -) -> Response | tuple[BasicError, int]: - """Creates a a flask response for an excel manifest file - - Args: - schema_url (str): The URL of the schema - dataset_id (str | None): Use this to get the existing manifest in the - dataset - add_annotations (bool): Whether or not annotations get added to the manifest - manifest_title (str | None): Title of the manifest - data_type (str | None): The datatype of the manifest to generate - 
display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - asset_view_id (str | None): ID of the asset view - - Returns: - Response | tuple[BasicError, int]: - Either A repsonse crated by Flask - or a tuple with a Error and a response status - """ - result, status = generate_excel_manifest_file( - schema_url=schema_url, - dataset_id=dataset_id, - add_annotations=add_annotations, - manifest_title=manifest_title, - data_type=data_type, - display_label_type=display_label_type, - asset_view_id=asset_view_id, - ) - - if isinstance(result, BasicError): - return result, status - - dir_name = os.path.dirname(result) - file_name = os.path.basename(result) - mimetype = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" - return send_from_directory( - directory=dir_name, - path=file_name, - as_attachment=True, - mimetype=mimetype, - max_age=0, - ) - - -@handle_exceptions -def generate_google_sheet_manifests( - schema_url: str, - add_annotations: bool, - manifest_title: str | None, - display_label_type: DisplayLabelType, - use_strict_validation: bool, - dataset_id_array: list[str] | None, - data_type_array: list[str] | None, - asset_view_id: str | None, - generate_all_manifests: bool, -) -> tuple[GoogleSheetLinks | BasicError, int]: - """Generates a list of links to manifests in google sheet form - - Args: - schema_url (str): The URL of the schema - dataset_id_array (list[str] | None): Use this to get the existing manifests in the - datasets. Must be of same type as the data_type_array, same order, and same length - asset_view_id (str | None): ID of the asset view - data_type_array (list[str] | None): The datatypes of the manifests to generate - add_annotations (bool): Whether or not annotations get added to the manifest - manifest_title (str | None): Title of the manifest - use_strict_validation (bool): Whether or not to use google sheet strict validation - generate_all_manifests (bool): Will generate a manifest for all data types - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Raises: - ValueError: When generate_all_manifests is true and either dataset_id_array or - data_type_array are provided - ValueError: When generate_all_manifests is false and data_type_array is not provided - ValueError: When generate_all_manifests is false and dataset_id_array is provided, - but it doesn't match the length of data_type_array - - Returns: - tuple[GoogleSheetLinks | BasicError, int]: A tuple - The first item is either the google sheet links of the manifests or an error object - The second item is the response status - """ - - if generate_all_manifests: - data_type_array = ["all manifests"] - if not data_type_array: - data_type_array = [] - - access_token = get_access_token() - if asset_view_id: - CONFIG.synapse_master_fileview_id = asset_view_id - schema_path = download_schema_file_as_jsonld(schema_url) - links = ManifestGenerator.create_manifests( - path_to_data_model=schema_path, - output_format="google_sheet", - data_types=data_type_array, - title=manifest_title, - access_token=access_token, - dataset_ids=dataset_id_array, - strict=use_strict_validation, - use_annotations=add_annotations, - data_model_labels=display_label_type, - ) - result: GoogleSheetLinks | BasicError = GoogleSheetLinks(links) - status = 200 - return result, status diff --git a/apps/schematic/api/schematic_api/controllers/manifest_validation_controller.py 
b/apps/schematic/api/schematic_api/controllers/manifest_validation_controller.py deleted file mode 100644 index 7a543a228..000000000 --- a/apps/schematic/api/schematic_api/controllers/manifest_validation_controller.py +++ /dev/null @@ -1,184 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api.models.manifest_validation_result import ( - ManifestValidationResult, -) # noqa: E501 -from schematic_api import util -from schematic_api.controllers import manifest_validation_controller_impl - - -def submit_manifest_csv( - schema_url, - component, - dataset_id, - asset_view_id, - body, - restrict_rules=None, - storage_method=None, - hide_blanks=None, - table_manipulation_method=None, - display_label_type=None, - annotation_key_style=None, - table_column_name_style=None, -): # noqa: E501 - """Validates manifest in csv form, then submits it - - Validates manifest in csv form, then submits it # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param component: A component in a schema, either the dsplay label or schema label - :type component: str - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param body: .csv file - :type body: str - :param restrict_rules: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. - :type restrict_rules: bool - :param storage_method: file_and_entities will store the manifest as a csv and create Synapse files for each row in the manifest. table_and_file will store the manifest as a table and a csv on Synapse. file_only will store the manifest as a csv only on Synapse. table_file_and_entities will perform the options file_with_entites and table in combination. - :type storage_method: str - :param hide_blanks: If true, annotations with blank values will be hidden from a dataset's annotation list in Synaspe. If false, annotations with blank values will be displayed. - :type hide_blanks: bool - :param table_manipulation_method: replace will remove the rows and columns from the existing table and store the new rows and columns, preserving the name and synID. upsert will add the new rows to the table and preserve the exisitng rows and columns in the existing table. - :type table_manipulation_method: str - :param display_label_type: The type of label to display - :type display_label_type: str - :param annotation_key_style: The labeling style for annotation keys. - :type annotation_key_style: str - :param table_column_name_style: The labeling syle for table column names. 
- :type table_column_name_style: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return manifest_validation_controller_impl.submit_manifest_csv( - schema_url, - component, - dataset_id, - asset_view_id, - body, - restrict_rules, - storage_method, - hide_blanks, - table_manipulation_method, - display_label_type, - annotation_key_style, - table_column_name_style, - ) - - -def submit_manifest_json( - schema_url, - component, - dataset_id, - asset_view_id, - restrict_rules=None, - storage_method=None, - hide_blanks=None, - table_manipulation_method=None, - display_label_type=None, - annotation_key_style=None, - table_column_name_style=None, - body=None, -): # noqa: E501 - """Validates a manifest in json form, then submits it - - Validates a manifest in json form, then submits it in csv form # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param component: A component in a schema, either the dsplay label or schema label - :type component: str - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param restrict_rules: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. - :type restrict_rules: bool - :param storage_method: file_and_entities will store the manifest as a csv and create Synapse files for each row in the manifest. table_and_file will store the manifest as a table and a csv on Synapse. file_only will store the manifest as a csv only on Synapse. table_file_and_entities will perform the options file_with_entites and table in combination. - :type storage_method: str - :param hide_blanks: If true, annotations with blank values will be hidden from a dataset's annotation list in Synaspe. If false, annotations with blank values will be displayed. - :type hide_blanks: bool - :param table_manipulation_method: replace will remove the rows and columns from the existing table and store the new rows and columns, preserving the name and synID. upsert will add the new rows to the table and preserve the exisitng rows and columns in the existing table. - :type table_manipulation_method: str - :param display_label_type: The type of label to display - :type display_label_type: str - :param annotation_key_style: The labeling style for annotation keys. - :type annotation_key_style: str - :param table_column_name_style: The labeling syle for table column names. 
- :type table_column_name_style: str - :param body: A manifest in json form - :type body: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return manifest_validation_controller_impl.submit_manifest_json( - schema_url, - component, - dataset_id, - asset_view_id, - restrict_rules, - storage_method, - hide_blanks, - table_manipulation_method, - display_label_type, - annotation_key_style, - table_column_name_style, - body, - ) - - -def validate_manifest_csv( - schema_url, component_label, body, restrict_rules=None, display_label_type=None -): # noqa: E501 - """Validates a manifest in csv form - - Validates a manifest in csv form # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param component_label: The label of a component in a schema - :type component_label: str - :param body: .csv file - :type body: str - :param restrict_rules: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. - :type restrict_rules: bool - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[ManifestValidationResult, Tuple[ManifestValidationResult, int], Tuple[ManifestValidationResult, int, Dict[str, str]] - """ - return manifest_validation_controller_impl.validate_manifest_csv( - schema_url, component_label, body, restrict_rules, display_label_type - ) - - -def validate_manifest_json( - schema_url, component_label, restrict_rules=None, display_label_type=None, body=None -): # noqa: E501 - """Validates a manifest in json form - - Validates a manifest in json form # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param component_label: The label of a component in a schema - :type component_label: str - :param restrict_rules: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. 
- :type restrict_rules: bool - :param display_label_type: The type of label to display - :type display_label_type: str - :param body: A manifest in json form - :type body: str - - :rtype: Union[ManifestValidationResult, Tuple[ManifestValidationResult, int], Tuple[ManifestValidationResult, int, Dict[str, str]] - """ - return manifest_validation_controller_impl.validate_manifest_json( - schema_url, component_label, restrict_rules, display_label_type, body - ) diff --git a/apps/schematic/api/schematic_api/controllers/manifest_validation_controller_impl.py b/apps/schematic/api/schematic_api/controllers/manifest_validation_controller_impl.py deleted file mode 100644 index 2b22abdda..000000000 --- a/apps/schematic/api/schematic_api/controllers/manifest_validation_controller_impl.py +++ /dev/null @@ -1,373 +0,0 @@ -"""Implementation of manifest validation endpoints""" - -# pylint: disable=too-many-locals - -from typing import Any - -from schematic import CONFIG # type: ignore -from schematic.models.metadata import MetadataModel # type: ignore -from schematic.utils.schema_utils import DisplayLabelType # type: ignore - -from schematic_api.models.manifest_validation_result import ManifestValidationResult -from schematic_api.models.basic_error import BasicError -from schematic_api.controllers.utils import ( - handle_exceptions, - get_access_token, - download_schema_file_as_jsonld, - save_manifest_json_string_as_csv, - save_manifest_csv_string_as_csv, -) - - -def submit_manifest_with_schematic( # pylint: disable=too-many-arguments - schema_path: str, - manifest_path: str, - component: str | None, - dataset_id: str, - restrict_rules: bool = False, - storage_method: str = "table_file_and_entities", - hide_blanks: bool = False, - table_manipulation_method: str = "replace", - display_label_type: DisplayLabelType = "class_label", - table_column_name_style: str = "class_label", - annotation_key_style: str = "class_label", -) -> str: - """Submits a manifest csv - - Args: - schema_path (str): The path to a schema in jsonld form - manifest_path (str): The path to a manifest in csv form - component (str | None): - The component, either schema label, or display label - See use_schema_label - dataset_id (str): The id of the dataset to submit the manifest to - restrict_rules (bool, optional): - Whether or not to restrict rule to non- great expectations. - Defaults to False. - storage_method (str, optional): - Specify what will be updated. - Defaults to "table_file_and_entities". - hide_blanks (bool, optional): - Whether or not annotations with blank values will be hidden from a - datasets annotation list. - Defaults to False. - table_manipulation_method (str, optional): - Specify the way the manifest tables should be stored. - Defaults to "replace". - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - annotation_key_style (str): Sets labeling syle for annotation keys. - class_label: will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting while ensuring the label is formatted properly for Synapse annotations. - table_column_name_style: (str): Sets labeling style for table column names. - display_name: will use the raw display name as the column name. 
- class_label will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting. - - Returns: - str: The id of the manifest - """ - access_token = get_access_token() - metadata_model = MetadataModel( - inputMModelLocation=schema_path, - inputMModelLocationType="local", - data_model_labels=display_label_type, - ) - manifest_id: str = metadata_model.submit_metadata_manifest( - manifest_path=manifest_path, - dataset_id=dataset_id, - validate_component=component, - access_token=access_token, - manifest_record_type=storage_method, - restrict_rules=restrict_rules, - hide_blanks=hide_blanks, - table_manipulation=table_manipulation_method, - table_column_names=table_column_name_style, - annotation_keys=annotation_key_style, - ) - return manifest_id - - -@handle_exceptions -def submit_manifest_csv( # pylint: disable=too-many-arguments - schema_url: str, - component: str | None, - dataset_id: str, - asset_view_id: str, - body: bytes, - restrict_rules: bool = False, - storage_method: str = "table_file_and_entities", - hide_blanks: bool = False, - table_manipulation_method: str = "replace", - display_label_type: DisplayLabelType = "class_label", - annotation_key_style: str = "class_label", - table_column_name_style: str = "class_label", -) -> tuple[str | BasicError, int]: - """Submits a manifest csv in bytes form - - Args: - schema_url (str): The url to schema the component is in - component (str | None): - The component, either schema label, or display label - See use_schema_label - dataset_id (str): The id of the dataset to submit the manifest to - asset_view_id (str): The id of the asset view the dataset is in - body (bytes): The body of the request, contains the manifest in bytes form - restrict_rules (bool, optional): - Whether or not to restrict rule to non- great expectations. - Defaults to False. - storage_method (str, optional): - Specify what will be updated. - Defaults to "table_file_and_entities". - hide_blanks (bool, optional): - Whether or not annotations with blank values will be hidden from a - datasets annotation list. - Defaults to False. - table_manipulation_method (str, optional): - Specify the way the manifest tables should be stored. - Defaults to "replace". - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - annotation_key_style (str): Sets labeling syle for annotation keys. - class_label: will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting while ensuring the label is formatted properly for Synapse annotations. - table_column_name_style: (str): Sets labeling style for table column names. - display_name: will use the raw display name as the column name. - class_label will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting. 
- - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the id of the manifest or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - manifest_path = save_manifest_csv_string_as_csv(body) - schema_path = download_schema_file_as_jsonld(schema_url) - - result: str | BasicError = submit_manifest_with_schematic( - schema_path=schema_path, - manifest_path=manifest_path, - component=component, - dataset_id=dataset_id, - restrict_rules=restrict_rules, - storage_method=storage_method, - hide_blanks=hide_blanks, - table_manipulation_method=table_manipulation_method, - display_label_type=display_label_type, - table_column_name_style=table_column_name_style, - annotation_key_style=annotation_key_style, - ) - - status = 200 - return result, status - - -@handle_exceptions -def submit_manifest_json( # pylint: disable=too-many-arguments - schema_url: str, - component: str | None, - dataset_id: str, - asset_view_id: str, - restrict_rules: bool = False, - storage_method: str = "table_file_and_entities", - hide_blanks: bool = False, - table_manipulation_method: str = "replace", - display_label_type: DisplayLabelType = "class_label", - annotation_key_style: str = "class_label", - table_column_name_style: str = "class_label", - body: Any = None, -) -> tuple[str | BasicError, int]: - """Submits a manifest csv in bytes form - - Args: - schema_url (str): The url to schema the component is in - component (str | None): - The component, either schema label, or display label - See use_schema_label - dataset_id (str): The id of the dataset to submit the manifest to - asset_view_id (str): The id of the asset view the dataset is in - body (Any): The body of the request. - restrict_rules (bool, optional): - Whether or not to restrict rule to non- great expectations. - Defaults to False. - storage_method (str, optional): - Specify what will be updated. - Defaults to "table_file_and_entities". - hide_blanks (bool, optional): - Whether or not annotations with blank values will be hidden from a datasets - annotation list - Defaults to False. - table_manipulation_method (str, optional): - Specify the way the manifest tables should be stored. - Defaults to "replace". - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - annotation_key_style (str): Sets labeling syle for annotation keys. - class_label: will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting while ensuring the label is formatted properly for Synapse annotations. - table_column_name_style: (str): Sets labeling style for table column names. - display_name: will use the raw display name as the column name. - class_label will format the display name as upper camelcase, and strip blacklisted - characters - display_label: will strip blacklisted characters including spaces, to retain display label - formatting. 
- - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the id of the manifest or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - manifest_path = save_manifest_json_string_as_csv(body) - schema_path = download_schema_file_as_jsonld(schema_url) - - result: str | BasicError = submit_manifest_with_schematic( - schema_path=schema_path, - manifest_path=manifest_path, - component=component, - dataset_id=dataset_id, - restrict_rules=restrict_rules, - storage_method=storage_method, - hide_blanks=hide_blanks, - table_manipulation_method=table_manipulation_method, - display_label_type=display_label_type, - table_column_name_style=table_column_name_style, - annotation_key_style=annotation_key_style, - ) - - status = 200 - return result, status - - -def validate_manifest_with_schematic( - manifest_path: str, - schema_url: str, - component_label: str, - restrict_rules: bool, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[list, list]: - """Validates a manifest csv file - - Args: - manifest_path (str): The path to the manifest - schema_url (str): The url of the schema to validate the manifest against - component_label (str): The label of the component being validated - restrict_rules (bool): Weather or not to restrict the rules used - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[list, list]: A tuple - The first item is a list of validation errors - The second item is a list of validation warnings - """ - schema_path = download_schema_file_as_jsonld(schema_url) - access_token = get_access_token() - - metadata_model = MetadataModel( - inputMModelLocation=schema_path, - inputMModelLocationType="local", - data_model_labels=display_label_type, - ) - result: tuple[list, list] = metadata_model.validateModelManifest( - manifestPath=manifest_path, - rootNode=component_label, - restrict_rules=restrict_rules, - access_token=access_token, - ) - return result - - -@handle_exceptions -def validate_manifest_csv( - schema_url: str, - component_label: str, - body: bytes, - restrict_rules: bool, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[ManifestValidationResult | BasicError, int]: - """Validates a manifest csv file - - Args: - schema_url (str): The url of the schema to validate the manifest against - component_label (str): The label of the component being validated - body (bytes): The body of the request, a manifest csv in bytes form - restrict_rules (bool): Weather or not to restrict the rules used - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[ManifestValidationResult | BasicError, int]: A tuple - The first item is the results of the validation attempt or an error - The second item is the status of the request - """ - manifest_path = save_manifest_csv_string_as_csv(body) - - errors, warnings = validate_manifest_with_schematic( - manifest_path, - schema_url, - component_label, - restrict_rules, - display_label_type=display_label_type, - ) - - result: ManifestValidationResult | BasicError = ManifestValidationResult( - errors=errors, warnings=warnings - ) - return result, 200 - - -@handle_exceptions -def validate_manifest_json( - schema_url: str, - component_label: str, - restrict_rules: bool, - display_label_type: DisplayLabelType = "class_label", - body: Any = None, -) -> tuple[ManifestValidationResult | BasicError, int]: - 
"""Validates a manifest in json string form - - Args: - schema_url (str): The url of the schema to validate the manifest against - component_label (str): The label of the component being validated - body (Any): The body of the request - restrict_rules (bool): Weather or not to restrict the rules used - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[ManifestValidationResult | BasicError, int]: A tuple - The first item is the results of the validation attempt or an error - The second item is the status of the request - """ - manifest_path = save_manifest_json_string_as_csv(body) - - errors, warnings = validate_manifest_with_schematic( - manifest_path, - schema_url, - component_label, - restrict_rules, - display_label_type=display_label_type, - ) - - result: ManifestValidationResult | BasicError = ManifestValidationResult( - errors=errors, warnings=warnings - ) - return result, 200 diff --git a/apps/schematic/api/schematic_api/controllers/paging.py b/apps/schematic/api/schematic_api/controllers/paging.py deleted file mode 100644 index 9e87502a8..000000000 --- a/apps/schematic/api/schematic_api/controllers/paging.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Functionality to handle paginated endpoints""" - -import math -from typing import TypeVar - -ITEM_TYPE = TypeVar("ITEM_TYPE") -TOTAL_ITEMS_MSG = "total_items must be 0 or greater: " -PAGE_MAX_ITEMS_MSG = "page_max_items must be 1 or greater: " -PAGE_NUMBER_MSG = "page_number must be 1 or greater: " - - -class Page: - """This represents a page for a generic list of items for a paginated endpoint""" - - def __init__( - self, items: list[ITEM_TYPE], page_number: int = 1, page_max_items: int = 100000 - ) -> None: - """ - Args: - items (list[ITEM_TYPE]): A list of all items in the query - page_number (int, optional): The page number the current request is for. Defaults to 1. - page_max_items (int, optional): The maximum number of items per page. Defaults to 100000. 
- """ - self.page_number = page_number - self.page_max_items = page_max_items - self.total_items = len(items) - self.total_pages = get_page_amount(self.total_items, page_max_items) - self.has_next = page_number < self.total_pages - self.has_previous = page_number > 1 - self.items: list[ITEM_TYPE] = get_item_slice(items, page_max_items, page_number) - - -def get_page_amount(total_items: int, page_max_items: int) -> int: - """Getes the amount of pages total based on the number of items and page size - - Args: - total_items (int): The total number of items in the query - page_max_items (int): The maximum number of items per page - - Raises: - ValueError: total_items is less than 0 - ValueError: page_max_items is less than - - Returns: - int: The amount of pages - """ - if total_items < 0: - raise ValueError(TOTAL_ITEMS_MSG, total_items) - if page_max_items < 1: - raise ValueError(PAGE_MAX_ITEMS_MSG, page_max_items) - return math.ceil(total_items / page_max_items) - - -def get_item_slice( - items: list[ITEM_TYPE], page_max_items: int, page_number: int -) -> list[ITEM_TYPE]: - """Gets a list slice based on the paging parameters - - Args: - items (list[ITEM_TYPE]): A list of items to be sliced - page_max_items (int): The maximum number of items per page - page_number (int): The page number the current request is for - - Returns: - list[ITEM_TYPE]: The slice of items - """ - page_indeces = get_page_indeces(len(items), page_max_items, page_number) - return items[page_indeces[0] : page_indeces[1]] - - -def get_page_indeces( - total_items: int, page_max_items: int, page_number: int -) -> tuple[int, int]: - """Gets the indces used to slice the list of items - - Args: - total_items (int): The total number of items in the query - page_max_items (int): The maximum number of items per page - page_number (int): The page number the current request is for - - Raises: - ValueError: total_items is less than 0 - ValueError: page_max_items is less than 1 - ValueError: page_number is less than 1 - - Returns: - tuple[int, int]: The two indeces to slice the list of items with - """ - if total_items < 0: - raise ValueError(TOTAL_ITEMS_MSG, total_items) - if page_max_items < 1: - raise ValueError(PAGE_MAX_ITEMS_MSG, page_max_items) - if page_number < 1: - raise ValueError(PAGE_NUMBER_MSG, page_number) - index1 = (page_number - 1) * page_max_items - index2 = min(index1 + page_max_items, total_items) - return (index1, index2) diff --git a/apps/schematic/api/schematic_api/controllers/schema_controller.py b/apps/schematic/api/schematic_api/controllers/schema_controller.py deleted file mode 100644 index 9cc701b0d..000000000 --- a/apps/schematic/api/schematic_api/controllers/schema_controller.py +++ /dev/null @@ -1,307 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api.models.component_requirement_array import ( - ComponentRequirementArray, -) # noqa: E501 -from schematic_api.models.component_requirement_graph import ( - ComponentRequirementGraph, -) # noqa: E501 -from schematic_api.models.connected_node_pair_array import ( - ConnectedNodePairArray, -) # noqa: E501 -from schematic_api.models.connected_node_pair_page import ( - ConnectedNodePairPage, -) # noqa: E501 -from schematic_api.models.node_array import NodeArray # noqa: E501 -from schematic_api.models.node_page import NodePage # noqa: E501 -from schematic_api.models.node_property_array import NodePropertyArray # 
noqa: E501 -from schematic_api.models.validation_rule_array import ValidationRuleArray # noqa: E501 -from schematic_api import util -from schematic_api.controllers import schema_controller_impl - - -def get_component( - component_label, schema_url, include_index=None, display_label_type=None -): # noqa: E501 - """Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). - - Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). # noqa: E501 - - :param component_label: The label of a component in a schema - :type component_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param include_index: Whether to include the indexes of the dataframe in the returned JSON string. - :type include_index: bool - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return schema_controller_impl.get_component( - component_label, schema_url, include_index, display_label_type - ) - - -def get_component_requirements_array( - component_label, schema_url, display_label_type=None -): # noqa: E501 - """Given a source model component (see https://w3id.org/biolink/vocab/category for definition of component), return all components required by it in array form. - - Given a source model component (see https://w3id.org/biolink/vocab/category for definition of component), return all components required by it in array form. # noqa: E501 - - :param component_label: The label of a component in a schema - :type component_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[ComponentRequirementArray, Tuple[ComponentRequirementArray, int], Tuple[ComponentRequirementArray, int, Dict[str, str]] - """ - return schema_controller_impl.get_component_requirements_array( - component_label, schema_url, display_label_type - ) - - -def get_component_requirements_graph( - component_label, schema_url, display_label_type=None -): # noqa: E501 - """Given a source model component (see https://w3id.org/biolink/vocab/category for definition of component), return all components required by it in graph form. - - Given a source model component (see https://w3id.org/biolink/vocab/category for definition of component), return all components required by it in graph form.
# noqa: E501 - - :param component_label: The label of a component in a schema - :type component_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[ComponentRequirementGraph, Tuple[ComponentRequirementGraph, int], Tuple[ComponentRequirementGraph, int, Dict[str, str]] - """ - return schema_controller_impl.get_component_requirements_graph( - component_label, schema_url, display_label_type - ) - - -def get_connected_node_pair_array( - schema_url, relationship_type, display_label_type=None -): # noqa: E501 - """Gets an array of connected node pairs - - Gets a array of connected node pairs # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param relationship_type: Type of relationship in a schema, such as requiresDependency - :type relationship_type: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[ConnectedNodePairArray, Tuple[ConnectedNodePairArray, int], Tuple[ConnectedNodePairArray, int, Dict[str, str]] - """ - return schema_controller_impl.get_connected_node_pair_array( - schema_url, relationship_type, display_label_type - ) - - -def get_connected_node_pair_page( - schema_url, - relationship_type, - page_number=None, - page_max_items=None, - display_label_type=None, -): # noqa: E501 - """Gets a page of connected node pairs - - Gets a page of connected node pairs # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param relationship_type: Type of relationship in a schema, such as requiresDependency - :type relationship_type: str - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[ConnectedNodePairPage, Tuple[ConnectedNodePairPage, int], Tuple[ConnectedNodePairPage, int, Dict[str, str]] - """ - return schema_controller_impl.get_connected_node_pair_page( - schema_url, relationship_type, page_number, page_max_items, display_label_type - ) - - -def get_node_dependency_array( - node_label, - schema_url, - return_display_names=None, - return_ordered_by_schema=None, - display_label_type=None, -): # noqa: E501 - """Gets the immediate dependencies that are related to the given source node - - Gets the immediate dependencies that are related to the given source node # noqa: E501 - - :param node_label: The label of the source node in a schema to get the dependencies of - :type node_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param return_display_names: Whether or not to return the display names of the component, otherwise the label - :type return_display_names: bool - :param return_ordered_by_schema: Whether or not to order the components by their order in the schema, otherwise random - :type return_ordered_by_schema: bool - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[NodeArray, Tuple[NodeArray, int], Tuple[NodeArray, int, Dict[str, str]] - """ - return schema_controller_impl.get_node_dependency_array( - node_label, - schema_url, - return_display_names, - return_ordered_by_schema, - 
display_label_type, - ) - - -def get_node_dependency_page( - node_label, - schema_url, - return_display_names=None, - return_ordered_by_schema=None, - page_number=None, - page_max_items=None, - display_label_type=None, -): # noqa: E501 - """Gets the immediate dependencies that are related to the given source node - - Gets the immediate dependencies that are related to the given source node # noqa: E501 - - :param node_label: The label of the source node in a schema to get the dependencies of - :type node_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param return_display_names: Whether or not to return the display names of the component, otherwise the label - :type return_display_names: bool - :param return_ordered_by_schema: Whether or not to order the components by their order in the schema, otherwise random - :type return_ordered_by_schema: bool - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[NodePage, Tuple[NodePage, int], Tuple[NodePage, int, Dict[str, str]] - """ - return schema_controller_impl.get_node_dependency_page( - node_label, - schema_url, - return_display_names, - return_ordered_by_schema, - page_number, - page_max_items, - display_label_type, - ) - - -def get_node_is_required( - node_display, schema_url, display_label_type=None -): # noqa: E501 - """Gets whether or not the node is required in the schema - - Gets whether or not the node is required in the schema # noqa: E501 - - :param node_display: The display name of the node in a schema - :type node_display: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[bool, Tuple[bool, int], Tuple[bool, int, Dict[str, str]] - """ - return schema_controller_impl.get_node_is_required( - node_display, schema_url, display_label_type - ) - - -def get_node_properties(node_label, schema_url, display_label_type=None): # noqa: E501 - """Gets properties associated with a given node - - Gets properties associated with a given node # noqa: E501 - - :param node_label: The label of the source node in a schema to get the dependencies of - :type node_label: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[NodePropertyArray, Tuple[NodePropertyArray, int], Tuple[NodePropertyArray, int, Dict[str, str]] - """ - return schema_controller_impl.get_node_properties( - node_label, schema_url, display_label_type - ) - - -def get_node_validation_rules( - node_display, schema_url, display_label_type=None -): # noqa: E501 - """Gets the validation rules, along with the arguments for each given rule associated with a given node - - Gets the validation rules, along with the arguments for each given rule associated with a given node # noqa: E501 - - :param node_display: The display name of the node in a schema - :type node_display: str - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: 
Union[ValidationRuleArray, Tuple[ValidationRuleArray, int], Tuple[ValidationRuleArray, int, Dict[str, str]] - """ - return schema_controller_impl.get_node_validation_rules( - node_display, schema_url, display_label_type - ) - - -def get_property_label(node_display, use_strict_camel_case=None): # noqa: E501 - """Gets the property label of the node - - Gets the property label of the node # noqa: E501 - - :param node_display: The display name of the node in a schema - :type node_display: str - :param use_strict_camel_case: Whether or not to use the more strict way of converting to camel case - :type use_strict_camel_case: bool - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return schema_controller_impl.get_property_label( - node_display, use_strict_camel_case - ) - - -def get_schema_attributes(schema_url, display_label_type=None): # noqa: E501 - """Get all the attributes associated with a data model formatted as a dataframe (stored as a JSON String). - - Get all the attributes associated with a data model formatted as a dataframe (stored as a JSON String). # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return schema_controller_impl.get_schema_attributes(schema_url, display_label_type) diff --git a/apps/schematic/api/schematic_api/controllers/schema_controller_impl.py b/apps/schematic/api/schematic_api/controllers/schema_controller_impl.py deleted file mode 100644 index e1f9b9db3..000000000 --- a/apps/schematic/api/schematic_api/controllers/schema_controller_impl.py +++ /dev/null @@ -1,568 +0,0 @@ -"""Implementation of all endpoints""" - -from typing import Union - -from schematic.schemas.data_model_parser import DataModelParser # type: ignore -from schematic.schemas.data_model_graph import ( # type: ignore - DataModelGraph, - DataModelGraphExplorer, -) -from schematic.visualization.attributes_explorer import AttributesExplorer # type: ignore -from schematic.utils.schema_utils import get_property_label_from_display_name # type: ignore -from schematic.utils.schema_utils import DisplayLabelType # type: ignore - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.component_requirement_subgraph import ( - ComponentRequirementSubgraph, -) -from schematic_api.models.node_property_array import NodePropertyArray -from schematic_api.models.validation_rule import ValidationRule -from schematic_api.models.validation_rule_array import ValidationRuleArray -from schematic_api.models.node import Node -from schematic_api.models.node_array import NodeArray -from schematic_api.models.node_page import NodePage -from schematic_api.models.connected_node_pair_array import ConnectedNodePairArray -from schematic_api.models.connected_node_pair_page import ConnectedNodePairPage -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api.controllers.utils import ( - handle_exceptions, - download_schema_file_as_jsonld, -) -from schematic_api.controllers.paging import Page - - -def create_data_model_graph_explorer( - schema_url: str, display_label_type: DisplayLabelType -) -> DataModelGraphExplorer: - """Creates a DataModelGraphExplorer for use in variopus endpoints - - Args: - schema_url (str): The URL of the schema in json form - display_label_type (DisplayLabelType): - The type of label to use as 
display - - Returns: - DataModelGraphExplorer: _description_ - """ - data_model_parser = DataModelParser(path_to_data_model=schema_url) - parsed_data_model = data_model_parser.parse_model() - data_model_grapher = DataModelGraph( - attribute_relationships_dict=parsed_data_model, - data_model_labels=display_label_type, - ) - graph_data_model = data_model_grapher.generate_data_model_graph() - return DataModelGraphExplorer(graph_data_model) - - -@handle_exceptions -def get_component( - component_label: str, - schema_url: str, - include_index: bool = False, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[str, BasicError], int]: - """ - Get all the attributes associated with a specific data model component formatted as a - dataframe (stored as a JSON String). - - Args: - component_label (str): The label of the component - schema_url (str): The URL of the schema in json form - include_index (bool): Whether to include the indexes of the dataframe - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[str, BasicError], int]: A tuple - The first item is either the component or an error object - The second item is the response status - """ - schema_path = download_schema_file_as_jsonld(schema_url) - explorer = AttributesExplorer( - path_to_jsonld=schema_path, data_model_labels=display_label_type - ) - attributes = ( - explorer._parse_component_attributes( # pylint:disable=protected-access - component=component_label, save_file=False, include_index=include_index - ) - ) - assert isinstance(attributes, str) - result: Union[str, BasicError] = attributes - status = 200 - return result, status - - -@handle_exceptions -def get_component_requirements_array( - component_label: str, - schema_url: str, - display_label_type: DisplayLabelType, -) -> tuple[Union[list[str], BasicError], int]: - """Gets the input components required components - - Args: - component_label (str): The label of the component - schema_url (str): The URL of the schema in json form - display_label_type (DisplayLabelType): - The type of label to use as display - Returns: - tuple[Union[ComponentRequirementArray, BasicError], int]: A tuple - item 1 is either the required coponents or an error - item 2 is the status - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - result = dmge.get_component_requirements(source_component=component_label) - status = 200 - return result, status - - -@handle_exceptions -def get_component_requirements_graph( - component_label: str, - schema_url: str, - display_label_type: DisplayLabelType, -) -> tuple[Union[list[ComponentRequirementSubgraph], BasicError], int]: - """Gets the input components required components - - Args: - component_label (str): The label of the component - schema_url (str): The URL of the schema in json form - display_label_type (DisplayLabelType): - The type of label to use as display - Returns: - tuple[Union[ComponentRequirementGrpah, BasicError], int]: A tuple - item 1 is either the required coponents or an error - item 2 is the status - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - graph = dmge.get_component_requirements_graph(source_component=component_label) - edges: list[tuple[str, str]] = graph.edges() - result = [ComponentRequirementSubgraph(edge[0], edge[1]) for edge in edges] - status = 200 - return result, status - - -def get_connected_node_pairs_from_schematic( - relationship_type: str, - schema_url: str, - 
display_label_type: DisplayLabelType = "class_label", -) -> list[ConnectedNodePair]: - """Gets a list of connected node pairs via the provided relationship - - Args: - relationship_type (str): the type of relationship in the schema to get - schema_url (str): The URL of the schema in jsonld form - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - list[ConnectedNodePair]: A list of connected node pairs - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - relationship_subgraph = dmge.get_subgraph_by_edge_type(relationship_type) - lst = [list(edge) for edge in relationship_subgraph.edges] - - return [ - ConnectedNodePair(connected_nodes[0], connected_nodes[1]) - for connected_nodes in lst - ] - - -@handle_exceptions -def get_connected_node_pair_array( - schema_url: str, - relationship_type: str, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[ConnectedNodePairArray, BasicError], int]: - """Gets a list of connected node pairs via the provided relationship - - Args: - relationship_type (str): the type of relationship in the schema to get - schema_url (str): The URL of the schema in jsonld form - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[ConnectedNodePairArray, BasicError], int]: A list of connected node pairs - """ - nodes = get_connected_node_pairs_from_schematic( - relationship_type, schema_url, display_label_type - ) - result: Union[ConnectedNodePairArray, BasicError] = ConnectedNodePairArray(nodes) - status = 200 - return result, status - - -@handle_exceptions -def get_connected_node_pair_page( - schema_url: str, - relationship_type: str, - page_number: int = 1, - page_max_items: int = 100000, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[ConnectedNodePairPage, BasicError], int]: - """Gets a page of connected node pairs via the provided relationship - - Args: - relationship_type (str): the type of relationship in the schema to get - schema_url (str): The URL of the schema in json form - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[ConnectedNodePairPage, BasicError], int: A tuple - The first item is either the connected nodes or an error object - The second item is the response status - """ - # pylint: disable=duplicate-code - - connected_nodes = get_connected_node_pairs_from_schematic( - relationship_type, schema_url, display_label_type - ) - page = Page(connected_nodes, page_number, page_max_items) - - cn_page = ConnectedNodePairPage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - connected_nodes=page.items, - ) - result: Union[ConnectedNodePairPage, BasicError] = cn_page - status = 200 - return result, status - - -def get_node_is_required_from_schematic( - node_display: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> bool: - """Gets whether or not the node is required by the schema - - Args: - node_display(str): The display name of the node - schema_url (str): The URL of the schema in jsonld form - display_label_type (DisplayLabelType): - The type of label to use as 
display - Defaults to "class_label" - - Returns: - bool: Whether or no the node is required - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - return dmge.get_node_required(node_display) - - -@handle_exceptions -def get_node_is_required( - node_display: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[bool, BasicError], int]: - """Gets whether or not the node is required by the schema - - Args: - node_display(str): The display name of the node - schema_url (str): The URL of the schema in jsonld form - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[bool, BasicError], int]: A tuple - The first item is either whether or not the node is required or an error object - The second item is the response status - """ - result: Union[bool, BasicError] = get_node_is_required_from_schematic( - node_display, schema_url, display_label_type - ) - status = 200 - return result, status - - -@handle_exceptions -def get_property_label( - node_display: str, use_strict_camel_case: bool -) -> tuple[Union[str, BasicError], int]: - """Gets the property label of the node - - Args: - node_display(str): The display name of the node - use_strict_camel_case (bool): whether or not to use strict camel case when doing the - conversion - - Returns: - tuple[Union[str, BasicError], int]: A tuple - The first item is either the label or an error object - The second item is the response status - """ - result: Union[str, BasicError] = get_property_label_from_display_name( - display_name=node_display, strict_camel_case=use_strict_camel_case - ) - status = 200 - return result, status - - -@handle_exceptions -def get_schema_attributes( - schema_url: str, display_label_type: DisplayLabelType = "class_label" -) -> tuple[Union[str, BasicError], int]: - """ - Get all the attributes associated with a data model formatted as a dataframe - (stored as a JSON String). 
- - Args: - schema_url (str): The URL of the schema in json form - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[str, BasicError], int]: A tuple - The first item is either the attributes or an error object - The second item is the response status - """ - schema_path = download_schema_file_as_jsonld(schema_url) - explorer = AttributesExplorer( - path_to_jsonld=schema_path, data_model_labels=display_label_type - ) - result: Union[str, BasicError] = explorer.parse_attributes(save_file=False) # type: ignore - status = 200 - return result, status - - -def get_node_properties_from_schematic( - node_label: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> list[str]: - """Gets the properties associated with the node - - Args: - schema_url (str): The URL of the schema in jsonld form - node_label (str): The label of the node - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - list[str]: A list of properties of the node - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - properties = dmge.find_class_specific_properties(node_label) - return properties - - -@handle_exceptions -def get_node_properties( - node_label: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[NodePropertyArray, BasicError], int]: - """Gets the properties associated with the node - - Args: - schema_url (str): The URL of the schema in jsonld form - node_label (str): The label of the node - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[NodePropertyArray, BasicError], int]: A tuple - The first item is either the node properties or an error object - The second item is the response status - """ - - properties = get_node_properties_from_schematic( - node_label, schema_url, display_label_type - ) - result: Union[NodePropertyArray, BasicError] = NodePropertyArray(properties) - status = 200 - return result, status - - -def get_node_validation_rules_from_schematic( - node_display: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> list[ValidationRule]: - """Gets the validation_rules associated with the node - - Args: - schema_url (str): The URL of the schema in jsonld form - node_display (str): The display name of the node - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - list[ValidationRule]: A list of validation_rules of the node - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - rules: list[str] = dmge.get_node_validation_rules(node_display) # type: ignore - return [ValidationRule(rule) for rule in rules] - - -@handle_exceptions -def get_node_validation_rules( - node_display: str, - schema_url: str, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[ValidationRuleArray, BasicError], int]: - """Gets the validation rules associated with the node - - Args: - schema_url (str): The URL of the schema in jsonld form - node_display(str): The display name of the node - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[ValidationRuleArray, BasicError], int]: A tuple - The first item is either the validation rules or an error object - The second item is the 
response status - """ - validation_rules = get_node_validation_rules_from_schematic( - node_display, schema_url, display_label_type - ) - result: Union[ValidationRuleArray, BasicError] = ValidationRuleArray( - validation_rules - ) - status = 200 - return result, status - - -def get_node_dependencies_from_schematic( - node_label: str, - schema_url: str, - return_display_names: bool = True, - return_ordered_by_schema: bool = True, - display_label_type: DisplayLabelType = "class_label", -) -> list[Node]: - """Gets the nodes that the input node is dependent on - - Args: - node_label (str): The label of the node to get dependencies for - schema_url (str): The URL of the schema in json form - return_display_names (bool): Whether or not to return the display names of the node, - otherwise the label - return_ordered_by_schema (bool):Whether or not to order the nodes by their order in - the schema, otherwise random - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - list[Node]: A list of nodes - - """ - dmge = create_data_model_graph_explorer(schema_url, display_label_type) - nodes = dmge.get_node_dependencies( - node_label, return_display_names, return_ordered_by_schema - ) - return [Node(node) for node in nodes] - - -@handle_exceptions -def get_node_dependency_array( - node_label: str, - schema_url: str, - return_display_names: bool = True, - return_ordered_by_schema: bool = True, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[NodeArray, BasicError], int]: - """Gets the nodes that the input node is dependent on - - Args: - node_label (str): The label of the node to get dependencies for - schema_url (str): The URL of the schema in json form - return_display_names (bool): Whether or not to return the display names of the dependencies, - otherwise the label - return_ordered_by_schema (bool):Whether or not to order the dependencies by their order in - the schema, otherwise random - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[Union[NodeArray, BasicError], int]: A tuple - The first item is either the nodes or an error object - The second item is the response status - """ - - nodes = get_node_dependencies_from_schematic( - node_label, - schema_url, - return_display_names, - return_ordered_by_schema, - display_label_type, - ) - result: Union[NodeArray, BasicError] = NodeArray(nodes) - status = 200 - return result, status - - -@handle_exceptions -def get_node_dependency_page( # pylint: disable=too-many-arguments - node_label: str, - schema_url: str, - return_display_names: bool = True, - return_ordered_by_schema: bool = True, - page_number: int = 1, - page_max_items: int = 100000, - display_label_type: DisplayLabelType = "class_label", -) -> tuple[Union[NodePage, BasicError], int]: - """Gets the nodes that the input node is dependent on - - Args: - node_label (str): The label of the node to get dependencies for - schema_url (str): The URL of the schema in json form - return_display_names (bool): Whether or not to return the display names of the dependencies, - otherwise the label - return_ordered_by_schema (bool):Whether or not to order the dependencies by their order in - the schema, otherwise random - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" 
- - Returns: - tuple[Union[NodePage, BasicError], int]: A tuple - The first item is either the nodes or an error object - The second item is the response status - """ - # pylint: disable=duplicate-code - nodes = get_node_dependencies_from_schematic( - node_label, - schema_url, - return_display_names, - return_ordered_by_schema, - display_label_type, - ) - page = Page(nodes, page_number, page_max_items) - - node_page = NodePage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - nodes=page.items, - ) - result: Union[NodePage, BasicError] = node_page - status = 200 - - return result, status diff --git a/apps/schematic/api/schematic_api/controllers/security_controller_.py b/apps/schematic/api/schematic_api/controllers/security_controller_.py deleted file mode 100644 index 1a9b1b861..000000000 --- a/apps/schematic/api/schematic_api/controllers/security_controller_.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import List - - -def info_from_bearerAuth(token): - """ - Check and retrieve authentication information from custom bearer token. - Returned value will be passed in 'token_info' parameter of your operation function, if there is one. - 'sub' or 'uid' will be set in 'user' parameter of your operation function, if there is one. - - :param token Token provided by Authorization header - :type token: str - :return: Decoded token information or None if token is invalid - :rtype: dict | None - """ - return {"uid": "user_id"} diff --git a/apps/schematic/api/schematic_api/controllers/storage_controller.py b/apps/schematic/api/schematic_api/controllers/storage_controller.py deleted file mode 100644 index f2f06c276..000000000 --- a/apps/schematic/api/schematic_api/controllers/storage_controller.py +++ /dev/null @@ -1,348 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.asset_type import AssetType # noqa: E501 -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api.models.dataset_metadata_array import ( - DatasetMetadataArray, -) # noqa: E501 -from schematic_api.models.dataset_metadata_page import DatasetMetadataPage # noqa: E501 -from schematic_api.models.file_metadata_array import FileMetadataArray # noqa: E501 -from schematic_api.models.file_metadata_page import FileMetadataPage # noqa: E501 -from schematic_api.models.manifest_metadata_array import ( - ManifestMetadataArray, -) # noqa: E501 -from schematic_api.models.manifest_metadata_page import ( - ManifestMetadataPage, -) # noqa: E501 -from schematic_api.models.project_metadata_array import ( - ProjectMetadataArray, -) # noqa: E501 -from schematic_api.models.project_metadata_page import ProjectMetadataPage # noqa: E501 -from schematic_api import util -from schematic_api.controllers import storage_controller_impl - - -def get_asset_view_csv(asset_view_id, asset_type): # noqa: E501 - """Gets the asset view table in csv file form - - Gets the asset view table in csv file form # noqa: E501 - - :param asset_view_id: ID of view listing all project data assets. E.g. 
for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_asset_view_csv(asset_view_id, asset_type) - - -def get_asset_view_json(asset_view_id, asset_type): # noqa: E501 - """Gets the asset view table in json form - - Gets the asset view table in json form # noqa: E501 - - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - - :rtype: Union[object, Tuple[object, int], Tuple[object, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_asset_view_json(asset_view_id, asset_type) - - -def get_dataset_file_metadata_array( - dataset_id, asset_type, asset_view_id, file_names=None, use_full_file_path=None -): # noqa: E501 - """Gets all files associated with a dataset. - - Gets all files associated with a dataset. # noqa: E501 - - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param file_names: A list of file names used to filter the output. - :type file_names: List[str] - :param use_full_file_path: Whether or not to return the full path of output, or just the basename. - :type use_full_file_path: bool - - :rtype: Union[FileMetadataArray, Tuple[FileMetadataArray, int], Tuple[FileMetadataArray, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_dataset_file_metadata_array( - dataset_id, asset_type, asset_view_id, file_names, use_full_file_path - ) - - -def get_dataset_file_metadata_page( - dataset_id, - asset_type, - asset_view_id, - file_names=None, - use_full_file_path=None, - page_number=None, - page_max_items=None, -): # noqa: E501 - """Gets all files associated with a dataset. - - Gets all files associated with a dataset. # noqa: E501 - - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param file_names: A list of file names used to filter the output. - :type file_names: List[str] - :param use_full_file_path: Whether or not to return the full path of output, or just the basename. 
- :type use_full_file_path: bool - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - - :rtype: Union[FileMetadataPage, Tuple[FileMetadataPage, int], Tuple[FileMetadataPage, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_dataset_file_metadata_page( - dataset_id, - asset_type, - asset_view_id, - file_names, - use_full_file_path, - page_number, - page_max_items, - ) - - -def get_dataset_manifest_csv(asset_type, dataset_id, asset_view_id): # noqa: E501 - """Gets the manifest in csv form - - Gets the manifest in csv form # noqa: E501 - - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_dataset_manifest_csv( - asset_type, dataset_id, asset_view_id - ) - - -def get_dataset_manifest_json(asset_type, dataset_id, asset_view_id): # noqa: E501 - """Gets the manifest in json form - - Gets the manifest in json form # noqa: E501 - - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param dataset_id: The ID of a dataset. - :type dataset_id: str - :param asset_view_id: ID of view listing all project data assets. E.g. 
for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - - :rtype: Union[object, Tuple[object, int], Tuple[object, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_dataset_manifest_json( - asset_type, dataset_id, asset_view_id - ) - - -def get_manifest_csv(asset_type, manifest_id): # noqa: E501 - """Gets the manifest in csv form - - Gets the manifest in csv form # noqa: E501 - - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param manifest_id: ID of a manifest - :type manifest_id: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_manifest_csv(asset_type, manifest_id) - - -def get_manifest_json(asset_type, manifest_id): # noqa: E501 - """Gets the manifest in json form - - Gets the manifest in json form # noqa: E501 - - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param manifest_id: ID of a manifest - :type manifest_id: str - - :rtype: Union[object, Tuple[object, int], Tuple[object, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_manifest_json(asset_type, manifest_id) - - -def get_project_dataset_metadata_array( - project_id, asset_type, asset_view_id -): # noqa: E501 - """Gets all dataset metadata in folder under a given storage project that the current user has access to. - - Gets all dataset meatdata in folder under a given storage project that the current user has access to. # noqa: E501 - - :param project_id: The Synapse ID of a storage project. - :type project_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - - :rtype: Union[DatasetMetadataArray, Tuple[DatasetMetadataArray, int], Tuple[DatasetMetadataArray, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_dataset_metadata_array( - project_id, asset_type, asset_view_id - ) - - -def get_project_dataset_metadata_page( - project_id, asset_type, asset_view_id, page_number=None, page_max_items=None -): # noqa: E501 - """Gets a page of dataset metadata in folder under a given storage project that the current user has access to. - - Gets a page of dataset meatdata in folder under a given storage project that the current user has access to. # noqa: E501 - - :param project_id: The Synapse ID of a storage project. - :type project_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. 
for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - - :rtype: Union[DatasetMetadataPage, Tuple[DatasetMetadataPage, int], Tuple[DatasetMetadataPage, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_dataset_metadata_page( - project_id, asset_type, asset_view_id, page_number, page_max_items - ) - - -def get_project_manifest_metadata_array( - project_id, asset_type, asset_view_id -): # noqa: E501 - """Gets all manifests in a project folder that users have access to - - Gets all manifests in a project folder that the current user has access to. # noqa: E501 - - :param project_id: The Synapse ID of a storage project. - :type project_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - - :rtype: Union[ManifestMetadataArray, Tuple[ManifestMetadataArray, int], Tuple[ManifestMetadataArray, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_manifest_metadata_array( - project_id, asset_type, asset_view_id - ) - - -def get_project_manifest_metadata_page( - project_id, asset_type, asset_view_id, page_number=None, page_max_items=None -): # noqa: E501 - """Gets all manifests in a project folder that users have access to - - Gets all manifests in a project folder that the current user has access to. # noqa: E501 - - :param project_id: The Synapse ID of a storage project. - :type project_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - - :rtype: Union[ManifestMetadataPage, Tuple[ManifestMetadataPage, int], Tuple[ManifestMetadataPage, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_manifest_metadata_page( - project_id, asset_type, asset_view_id, page_number, page_max_items - ) - - -def get_project_metadata_array(asset_view_id, asset_type): # noqa: E501 - """Gets all storage projects the current user has access to. - - Gets all storage projects the current user has access to. # noqa: E501 - - :param asset_view_id: ID of view listing all project data assets. E.g. 
for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - - :rtype: Union[ProjectMetadataArray, Tuple[ProjectMetadataArray, int], Tuple[ProjectMetadataArray, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_metadata_array(asset_view_id, asset_type) - - -def get_project_metadata_page( - asset_view_id, asset_type, page_number=None, page_max_items=None -): # noqa: E501 - """Gets all storage projects the current user has access to. - - Gets all storage projects the current user has access to. # noqa: E501 - - :param asset_view_id: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - :type asset_view_id: str - :param asset_type: Type of asset, such as Synapse - :type asset_type: dict | bytes - :param page_number: The page number to get for a paginated query - :type page_number: int - :param page_max_items: The maximum number of items per page (up to 100,000) for paginated endpoints - :type page_max_items: int - - :rtype: Union[ProjectMetadataPage, Tuple[ProjectMetadataPage, int], Tuple[ProjectMetadataPage, int, Dict[str, str]] - """ - if connexion.request.is_json: - asset_type = AssetType.from_dict(connexion.request.get_json()) # noqa: E501 - return storage_controller_impl.get_project_metadata_page( - asset_view_id, asset_type, page_number, page_max_items - ) diff --git a/apps/schematic/api/schematic_api/controllers/storage_controller_impl.py b/apps/schematic/api/schematic_api/controllers/storage_controller_impl.py deleted file mode 100644 index ca0c1770a..000000000 --- a/apps/schematic/api/schematic_api/controllers/storage_controller_impl.py +++ /dev/null @@ -1,671 +0,0 @@ -"""Implementation of all endpoints""" - -import os -from typing import Callable -import tempfile - -import pandas as pd -import synapseclient # type: ignore -from schematic.store.synapse import SynapseStorage, ManifestDownload, load_df # type: ignore -from schematic import CONFIG # type: ignore - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api.models.dataset_metadata_array import DatasetMetadataArray -from schematic_api.models.dataset_metadata_page import DatasetMetadataPage -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api.models.manifest_metadata_array import ManifestMetadataArray -from schematic_api.models.manifest_metadata_page import ManifestMetadataPage -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api.models.project_metadata_array import ProjectMetadataArray -from schematic_api.models.project_metadata_page import ProjectMetadataPage -from schematic_api.models.file_metadata import FileMetadata -from schematic_api.models.file_metadata_array import FileMetadataArray -from schematic_api.models.file_metadata_page import FileMetadataPage -from schematic_api.controllers.utils import ( - SYNAPSE_CACHE_PATH, - PURGE_SYNAPSE_CACHE, - handle_exceptions, - get_access_token, - purge_synapse_cache, -) -from schematic_api.controllers.paging import Page - - -def get_asset_storage_class(asset_type: str) -> Callable: - """Returns the class associated with the asset type. 
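# --- Illustrative sketch (not part of the deleted module): the dictionary-based
# --- dispatch used by get_asset_storage_class (whose body continues below) to map
# --- an asset type string to a storage class, raising ValueError for unknown
# --- types. DummyStorage and lookup_storage_class are hypothetical stand-ins for
# --- SynapseStorage and the real function.
class DummyStorage:
    """Placeholder storage backend used only for this example."""

def lookup_storage_class(asset_type: str) -> type:
    registry = {"synapse": DummyStorage}
    storage_class = registry.get(asset_type)
    if storage_class is None:
        raise ValueError(f"{asset_type} is not an allowed value: [{list(registry.keys())}]")
    return storage_class

assert lookup_storage_class("synapse") is DummyStorage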
- - Args: - asset_type (str): An asset type, such as "synapse". - - Raises: - ValueError: When the asset_type isn't in the asset_type dictionary - - Returns: - Callable: A class that has - - access_token parameter - - getStorageDatasetsInProject method - - getProjectManifests method - """ - asset_type_dict = {"synapse": SynapseStorage} - asset_type_object = asset_type_dict.get(asset_type) - if asset_type_object is None: - msg = f"{asset_type} is not an allowed value: [{list(asset_type_dict.keys())}]" - raise ValueError(msg) - return asset_type_object - - -def get_store( - asset_type: str, # pylint: disable=unused-argument -) -> SynapseStorage: - """Creates a SynapseStorage class and purges its synapse cache - - Args: - asset_type (str): The type of storage class (will be used in the future) - - Returns: - SynapseStorage: A synapse storage class - """ - access_token = get_access_token() - store = SynapseStorage( - access_token=access_token, synapse_cache_path=SYNAPSE_CACHE_PATH - ) - if PURGE_SYNAPSE_CACHE: - purge_synapse_cache(store) - return store - - -def get_asset_view_from_schematic(asset_type: str) -> pd.DataFrame: - """Gets the asset view in pandas.Dataframe form - - Args: - asset_view_id (str): The id of the asset view - asset_type (str): The type of asset, ie "synapse" - - Returns: - pandas.DataFrame: The asset view - """ - store = get_store(asset_type) - return store.getStorageFileviewTable() - - -@handle_exceptions -def get_asset_view_csv( - asset_view_id: str, asset_type: str -) -> tuple[str | BasicError, int]: - """Gets the asset view in csv form - - Args: - asset_view_id (str): The id of the asset view - asset_type (str): The type of asset, ie "synapse" - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the path of the file or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - asset_view = get_asset_view_from_schematic(asset_type) - with tempfile.NamedTemporaryFile( - delete=False, suffix=".asset_view.csv" - ) as tmp_file: - export_path = tmp_file.name - asset_view.to_csv(tmp_file.name, index=False) - return export_path, 200 - - -@handle_exceptions -def get_asset_view_json( - asset_view_id: str, asset_type: str -) -> tuple[str | BasicError, int]: - """Gets the asset view in json form - - Args: - asset_view_id (str): The id of the asset view - asset_type (str): The type of asset, ie "synapse" - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the fileview or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - asset_view = get_asset_view_from_schematic(asset_type) - result: str | BasicError = asset_view.to_json() - status = 200 - return result, status - - -def get_dataset_file_metadata_from_schematic( - dataset_id: str, - asset_type: str, - file_names: list[str] | None, - use_full_file_path: bool, -) -> list[FileMetadata]: - """Gets the file metadata for the files in a dataset - - Args: - dataset_id (str): The Id for the dataset to get the files from - asset_type (str): The type of asset, ie "synapse" - file_names (list[str] | None): An optional list of file names to filter the output by - use_full_file_path (bool): Whether or not to return the full file path of each file - - Returns: - list[FileMetadata]: A list of file metadata - """ - store = get_store(asset_type) - file_tuple_list = store.getFilesInStorageDataset( - datasetId=dataset_id, - fileNames=file_names, # type: ignore -
fullpath=use_full_file_path, - ) - return [FileMetadata(id=item[0], name=item[1]) for item in file_tuple_list] - - -@handle_exceptions -def get_dataset_file_metadata_array( - dataset_id: str, - asset_type: str, - asset_view_id: str, - file_names: list[str] | None = None, - use_full_file_path: bool = False, -) -> tuple[FileMetadataArray | BasicError, int]: - """Gets file metadata associated with a dataset - - Args: - dataset_id (str): The Id for the dataset to get the files from - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - file_names (Optional[list[str]]): An optional list of file names to filter the output by - use_full_file_path: Whether or not to return the full file path of each file - - Returns: - tuple[FileMetadataArray | BasicError, int]: A tuple - The first item is either the file metadata or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - file_metadata_list = get_dataset_file_metadata_from_schematic( - dataset_id, asset_type, file_names, use_full_file_path - ) - - result: FileMetadataArray | BasicError = FileMetadataArray(file_metadata_list) - status = 200 - - return result, status - - -@handle_exceptions -def get_dataset_file_metadata_page( # pylint: disable=too-many-arguments - dataset_id: str, - asset_type: str, - asset_view_id: str, - file_names: list[str] | None = None, - use_full_file_path: bool = False, - page_number: int = 1, - page_max_items: int = 100_000, -) -> tuple[FileMetadataPage | BasicError, int]: - """Gets file metadata associated with a dataset - - Args: - dataset_id (str): The Id for the dataset to get the files from - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - file_names (list[str] | None): An optional list of file names to filter the output by - use_full_file_path: Whether or not to return the full file path of each file - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - - Returns: - tuple[FileMetadataPage | BasicError, int]: A tuple - The first item is either the file metadata or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - file_metadata_list = get_dataset_file_metadata_from_schematic( - dataset_id, asset_type, file_names, use_full_file_path - ) - - page = Page(file_metadata_list, page_number, page_max_items) - - file_page = FileMetadataPage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - files=page.items, - ) - - result: FileMetadataPage | BasicError = file_page - status = 200 - - return result, status - - -def load_manifest_from_synapse_metadata( - manifest_data: synapseclient.File, -) -> pd.DataFrame: - """Loads a manifest from a csv file - - Args: - manifest_data (synapseclient.File): - Manifest metadata from doing syanpseclient.get on a file entity - - Returns: - pandas.DataFrame: The manifest - - """ - manifest_local_file_path = manifest_data["path"] - manifest = load_df(manifest_local_file_path) - os.remove(manifest_local_file_path) - return manifest - - -def get_dataset_manifest_from_schematic( - asset_type: str, dataset_id: str -) -> pd.DataFrame: - """Gets a manifest in pandas.Dataframe format - - Args: - asset_type (str): The type of asset, ie "synapse" - 
manifest_id (str): The unique id for the manifest file - dataset_id (str): The id of the dataset the manifest is in - - Returns: - pandas.DataFrame: The manifest - """ - store = get_store(asset_type) - manifest_data = store.getDatasetManifest( - datasetId=dataset_id, downloadFile=True, newManifestName="manifest.csv" - ) - assert isinstance(manifest_data, synapseclient.File) - return load_manifest_from_synapse_metadata(manifest_data) - - -@handle_exceptions -def get_dataset_manifest_csv( - asset_type: str, - dataset_id: str, - asset_view_id: str, -) -> tuple[str | BasicError, int]: - """Gets a manifest in csv file form - - Args: - asset_type (str): The type of asset, ie "synapse" - asset_view_id (str): The id of the asst view the dataset is in - dataset_id (str): The id of the dataset the manifest is in - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the path of the manifest or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - manifest = get_dataset_manifest_from_schematic(asset_type, dataset_id) - with tempfile.NamedTemporaryFile(delete=False, suffix=".manifest.csv") as tmp_file: - export_path = tmp_file.name - manifest.to_csv(tmp_file.name, index=False) - return export_path, 200 - - -@handle_exceptions -def get_dataset_manifest_json( - asset_type: str, - dataset_id: str, - asset_view_id: str, -) -> tuple[str | BasicError, int]: - """Gets a manifest in json form - - Args: - asset_type (str): The type of asset, ie "synapse" - asset_view_id (str): The id of the asst view the dataset is in - dataset_id (str): The id of the dataset the manifest is in - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the manifest or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - manifest = get_dataset_manifest_from_schematic(asset_type, dataset_id) - result: str | BasicError = manifest.to_json() - status = 200 - - return result, status - - -def get_manifest_from_schematic(asset_type: str, manifest_id: str) -> pd.DataFrame: - """Gets a manifest in pandas.Dataframe format - - Args: - asset_type (str): The type of asset, ie "synapse" - manifest_id (str): The unique id for the manifest file - - Returns: - pandas.DataFrame: The manifest - """ - # The storage object isn't needed but this purges the synapse cache - get_store(asset_type) - access_token = get_access_token() - synapse = SynapseStorage.login(access_token=access_token) - manifest_download = ManifestDownload(synapse, manifest_id) - manifest_data = ManifestDownload.download_manifest( - manifest_download, "manifest.csv" - ) - assert isinstance(manifest_data, synapseclient.File) - return load_manifest_from_synapse_metadata(manifest_data) - - -@handle_exceptions -def get_manifest_csv(asset_type: str, manifest_id: str) -> tuple[str | BasicError, int]: - """Gets a manifest in json form - - Args: - asset_type (str): The type of asset, ie "synapse" - manifest_id (str): The unique id for the manifest file - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the path to the manifest or an error object - The second item is the response status - """ - manifest = get_manifest_from_schematic(asset_type, manifest_id) - with tempfile.NamedTemporaryFile(delete=False, suffix=".manifest.csv") as tmp_file: - export_path = tmp_file.name - manifest.to_csv(tmp_file.name, index=False) - return export_path, 200 - - -@handle_exceptions -def get_manifest_json( - 
asset_type: str, manifest_id: str -) -> tuple[str | BasicError, int]: - """Gets a manifest in json form - - Args: - asset_type (str): The type of asset, ie "synapse" - manifest_id (str): The unique id for the manifest file - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the manifest or an error object - The second item is the response status - """ - manifest = get_manifest_from_schematic(asset_type, manifest_id) - result: str | BasicError = manifest.to_json() - status = 200 - - return result, status - - -def get_project_dataset_metadata_from_schematic( - project_id: str, asset_type: str -) -> list[DatasetMetadata]: - """Gets a list of dataset metadata from the project - - Args: - project_id (str): The id for the project - asset_type (str): The type of asset, ie "synapse" - - Returns: - list[DatasetMetadata]: A list of dataset metadata - """ - store = get_store(asset_type) - tuples = store.getStorageDatasetsInProject(projectId=project_id) - return [DatasetMetadata(id=item[0], name=item[1]) for item in tuples] - - -@handle_exceptions -def get_project_dataset_metadata_array( - project_id: str, asset_type: str, asset_view_id: str -) -> tuple[DatasetMetadataArray | BasicError, int]: - """Creates a list of dataset metadata from the project - - Args: - project_id (str): The Id for the project to get datasets from - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - - Returns: - tuple[DatasetMetadataArray | BasicError, int]: A tuple - The first item is either the dataset metadata or an error object - The second item is the response status - """ - - CONFIG.synapse_master_fileview_id = asset_view_id - dataset_metadata_list = get_project_dataset_metadata_from_schematic( - project_id, asset_type - ) - result: DatasetMetadataArray | BasicError = DatasetMetadataArray( - dataset_metadata_list - ) - status = 200 - return result, status - - -@handle_exceptions -def get_project_dataset_metadata_page( - project_id: str, - asset_type: str, - asset_view_id: str, - page_number: int = 1, - page_max_items: int = 100_000, -) -> tuple[DatasetMetadataPage | BasicError, int]: - """Creates a page of dataset metadata from the project - - Args: - project_id (str): The Id for the project to get datasets from - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - - Returns: - tuple[DatasetMetadataPage | BasicError, int]: A tuple - The first item is either the dataset metadata or an error object - The second item is the response status - """ - # pylint: disable=duplicate-code - - CONFIG.synapse_master_fileview_id = asset_view_id - dataset_metadata_list = get_project_dataset_metadata_from_schematic( - project_id, asset_type - ) - page = Page(dataset_metadata_list, page_number, page_max_items) - - dataset_page = DatasetMetadataPage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - datasets=page.items, - ) - - result: DatasetMetadataPage | BasicError = dataset_page - status = 200 - - return result, status - - -def get_project_manifest_metadata_from_schematic( - project_id: str, - asset_type: str, -) -> list[ManifestMetadata]: - """Gets manifest metadata from the project - - Args: - project_id (str): The id for the 
project - asset_type (str): The type of asset, ie "synapse" - - Returns: - list[ManifestMetadata]: A list of manifest metadata - """ - store = get_store(asset_type) - manifest_tuple_list = store.getProjectManifests(projectId=project_id) - return [ - ManifestMetadata( - name=item[1][1], - id=item[1][0], - dataset_name=item[0][1], - dataset_id=item[0][0], - component_name=item[2][0], - ) - for item in manifest_tuple_list - ] - - -@handle_exceptions -def get_project_manifest_metadata_array( - project_id: str, - asset_type: str, - asset_view_id: str, -) -> tuple[ManifestMetadataArray | BasicError, int]: - """Gets a list of manifest metadata from a project - - Args: - project_id (str): The id of the project - asset_view_id (str): The id of the asset view - asset_type (str): The type of asset, ie "synapse" - - Returns: - tuple[ManifestMetadataArray | BasicError, int]: A tuple - The first item is either the manifests or an error object - The second item is the response status - """ - CONFIG.synapse_master_fileview_id = asset_view_id - manifest_metadata = get_project_manifest_metadata_from_schematic( - project_id, asset_type - ) - result: ManifestMetadataArray | BasicError = ManifestMetadataArray( - manifest_metadata - ) - status = 200 - return result, status - - -@handle_exceptions -def get_project_manifest_metadata_page( - project_id: str, - asset_type: str, - asset_view_id: str, - page_number: int = 1, - page_max_items: int = 100_000, -) -> tuple[ManifestMetadataPage | BasicError, int]: - """Gets a page of manifest metadata from a project - - Args: - project_id (str): The id of the project - asset_view_id (str): The id of the asset view - asset_type (str): The type of asset, ie "synapse" - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - - Returns: - tuple[ManifestMetadataPage | BasicError, int]: A tuple - The first item is either the manifests or an error object - The second item is the response status - """ - # load config - CONFIG.synapse_master_fileview_id = asset_view_id - manifest_metadata = get_project_manifest_metadata_from_schematic( - project_id, asset_type - ) - - page = Page(manifest_metadata, page_number, page_max_items) - - manifest_page = ManifestMetadataPage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - manifests=page.items, - ) - - result: ManifestMetadataPage | BasicError = manifest_page - status = 200 - - return result, status - - -def get_project_metadata_from_schematic( - asset_type: str, -) -> list[ProjectMetadata]: - """Gets a list of projects - - Args: - asset_type (str): The type of asset, ie "synapse" - - Returns: - list[ProjectMetadata]: A list of project metadata - """ - store = get_store(asset_type) - metadata_tuple_list = store.getStorageProjects() - return [ProjectMetadata(id=item[0], name=item[1]) for item in metadata_tuple_list] - - -@handle_exceptions -def get_project_metadata_array( - asset_view_id: str, - asset_type: str, -) -> tuple[ProjectMetadataArray | BasicError, int]: - """Gets a list of project metadata the user has access to - - Args: - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - - Returns: - tuple[ProjectMetadataArray, BasicError, int]: A tuple - The first item is either the projects or an error object - The second item is the response status - """ - - 
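# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the deleted sources): the *_page endpoints
# above and below all follow the same pattern -- fetch the full metadata list
# from schematic, wrap it in the `Page` helper, then copy its paging attributes
# onto the generated *MetadataPage model. The `Page` class itself does not
# appear in this diff, so the hypothetical stand-in below only mirrors the
# attributes these controllers actually use (page_number, page_max_items,
# total_items, total_pages, has_next, has_previous, items); the real
# implementation may differ.
from math import ceil
from typing import Any, Sequence


class PageSketch:
    """Hypothetical minimal stand-in for the `Page` pagination helper."""

    def __init__(
        self,
        all_items: Sequence[Any],
        page_number: int = 1,
        page_max_items: int = 100_000,
    ) -> None:
        self.page_number = page_number
        self.page_max_items = page_max_items
        self.total_items = len(all_items)
        # Always report at least one (possibly empty) page.
        self.total_pages = max(1, ceil(self.total_items / self.page_max_items))
        self.has_next = page_number < self.total_pages
        self.has_previous = page_number > 1
        # Slice out only the items belonging to the requested page.
        start = (page_number - 1) * page_max_items
        self.items = list(all_items[start : start + page_max_items])


# Example: PageSketch(list(range(250)), page_number=2, page_max_items=100)
# yields items 100-199 with total_pages == 3, has_next == True, has_previous == True.
# ---------------------------------------------------------------------------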
CONFIG.synapse_master_fileview_id = asset_view_id - project_metadata = get_project_metadata_from_schematic(asset_type) - result: ProjectMetadataArray | BasicError = ProjectMetadataArray(project_metadata) - status = 200 - return result, status - - -@handle_exceptions -def get_project_metadata_page( - asset_view_id: str, - asset_type: str, - page_number: int = 1, - page_max_items: int = 100_000, -) -> tuple[ProjectMetadataPage | BasicError, int]: - """Gets a list of project metadata the user has access to - - Args: - asset_view_id (str): The id for the asset view of the project - asset_type (str): The type of asset, ie "synapse" - page_number (int): The page number the current request is for - page_max_items (int): The maximum number of items per page - - Returns: - tuple[ProjectMetadataPage | BasicError, int]: A tuple - The first item is either the projects or an error object - The second item is the response status - """ - - CONFIG.synapse_master_fileview_id = asset_view_id - project_metadata = get_project_metadata_from_schematic(asset_type) - page = Page(project_metadata, page_number, page_max_items) - manifest_page = ProjectMetadataPage( - number=page.page_number, - size=page.page_max_items, - total_elements=page.total_items, - total_pages=page.total_pages, - has_next=page.has_next, - has_previous=page.has_previous, - projects=page.items, - ) - result: ProjectMetadataPage | BasicError = manifest_page - status = 200 - return result, status diff --git a/apps/schematic/api/schematic_api/controllers/tangled_tree_controller.py b/apps/schematic/api/schematic_api/controllers/tangled_tree_controller.py deleted file mode 100644 index ebc616f06..000000000 --- a/apps/schematic/api/schematic_api/controllers/tangled_tree_controller.py +++ /dev/null @@ -1,53 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api import util -from schematic_api.controllers import tangled_tree_controller_impl - - -def get_tangled_tree_layers( - schema_url, figure_type=None, display_label_type=None -): # noqa: E501 - """Get tangled tree node layers to display for a given data model and figure type - - Get tangled tree node layers to display for a given data model and figure type # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param figure_type: Figure type to generate. - :type figure_type: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return tangled_tree_controller_impl.get_tangled_tree_layers( - schema_url, figure_type, display_label_type - ) - - -def get_tangled_tree_text( - schema_url, figure_type=None, text_format=None, display_label_type=None -): # noqa: E501 - """Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type - - Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type # noqa: E501 - - :param schema_url: The URL of a schema in jsonld or csv form - :type schema_url: str - :param figure_type: Figure type to generate. - :type figure_type: str - :param text_format: Text formatting type. 
- :type text_format: str - :param display_label_type: The type of label to display - :type display_label_type: str - - :rtype: Union[object, Tuple[object, int], Tuple[object, int, Dict[str, str]] - """ - return tangled_tree_controller_impl.get_tangled_tree_text( - schema_url, figure_type, text_format, display_label_type - ) diff --git a/apps/schematic/api/schematic_api/controllers/tangled_tree_controller_impl.py b/apps/schematic/api/schematic_api/controllers/tangled_tree_controller_impl.py deleted file mode 100644 index 62b056277..000000000 --- a/apps/schematic/api/schematic_api/controllers/tangled_tree_controller_impl.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Tangled tree controllers""" - -from typing import Literal - -from schematic.visualization.tangled_tree import TangledTree # type: ignore -from schematic.utils.schema_utils import DisplayLabelType # type: ignore - -from schematic_api.models.basic_error import BasicError -from schematic_api.controllers.utils import ( - handle_exceptions, - download_schema_file_as_jsonld, -) - - -@handle_exceptions -def get_tangled_tree_layers( - schema_url: str, - figure_type: Literal["component", "dependency"] = "component", - display_label_type: DisplayLabelType = "class_label", -) -> tuple[str | BasicError, int]: - """Gets layers for a tangled tree visualization. - - Args: - schema_url (str): The URL to the schema file - figure_type (Literal["component", "dependency"]): Figure type to generate. - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the layers or an error object - The second item is the response status - """ - schema_path = download_schema_file_as_jsonld(schema_url) - tangled_tree = TangledTree( - path_to_json_ld=schema_path, - figure_type=figure_type, - data_model_labels=display_label_type, - ) - # Currently TangledTree.get_tangled_tree_layers() returns either an empty list if - # save_file=False or a list of one string if save_file=False. - # The API should output just the string. - # TangledTree.get_tangled_tree_layers() will likely get changed in the future to return - # just a string. - layers_list: list[str] = tangled_tree.get_tangled_tree_layers(save_file=False) - if len(layers_list) == 0: - raise ValueError("TangledTree.get_tangled_tree_layers() returned an empty list") - result: str | BasicError = layers_list[0] - status = 200 - - return result, status - - -@handle_exceptions -def get_tangled_tree_text( - schema_url: str, - figure_type: Literal["component", "dependency"] = "component", - text_format: Literal["plain", "highlighted"] = "plain", - display_label_type: DisplayLabelType = "class_label", -) -> tuple[str | BasicError, int]: - """Gets text for a tangled tree visualization. - - Args: - schema_url (str): The URL to the schema file - figure_type (Literal["component", "dependency"]): Figure type to generate. 
- text_format (Literal["plain", "highlighted"]): Determines the type of text - rendering to return - display_label_type (DisplayLabelType): - The type of label to use as display - Defaults to "class_label" - - Returns: - tuple[str | BasicError, int]: A tuple - The first item is either the text or an error object - The second item is the response status - """ - schema_path = download_schema_file_as_jsonld(schema_url) - tangled_tree = TangledTree( - path_to_json_ld=schema_path, - figure_type=figure_type, - data_model_labels=display_label_type, - ) - attempt = tangled_tree.get_text_for_tangled_tree(text_format, save_file=False) - assert isinstance(attempt, str) - result: str | BasicError = attempt - status = 200 - return result, status diff --git a/apps/schematic/api/schematic_api/controllers/utils.py b/apps/schematic/api/schematic_api/controllers/utils.py deleted file mode 100644 index 829730dfe..000000000 --- a/apps/schematic/api/schematic_api/controllers/utils.py +++ /dev/null @@ -1,319 +0,0 @@ -"""utils for multiple controllers""" - -from typing import Callable, Any, Optional -import urllib.request -import shutil -import tempfile -from urllib.error import HTTPError -import tempfile -import os -import io -import json -import logging -import subprocess -from datetime import datetime, timedelta -import re -from math import ceil -import yaml - -import pandas as pd -from flask import request # type: ignore -from synapseclient.core.exceptions import ( # type: ignore - SynapseNoCredentialsError, - SynapseAuthenticationError, -) -from schematic.store import SynapseStorage -from schematic.exceptions import AccessCredentialsError # type: ignore - -from schematic_api.models.basic_error import BasicError - -# Config for various settable global values -# Will use config.yaml if it exists, otherwise uses the example file -# config.yaml is ignored by git so can be changed locally without accidentaly commiting it -# To do so copy default_cofnig.yaml to config.yaml and make changes there -if os.path.exists("config.yaml"): - with open("config.yaml", "r", encoding="utf-8") as file: - API_CONFIG = yaml.safe_load(file) -else: - with open("default_config.yaml", "r", encoding="utf-8") as file: - API_CONFIG = yaml.safe_load(file) - -PURGE_SYNAPSE_CACHE = API_CONFIG["purge_synapse_cache"] -SYNAPSE_CACHE_PATH = API_CONFIG["synapse_cache_path"] - -LOGGER = logging.getLogger("Synapse cache") - - -def save_manifest_json_string_as_csv(manifest_json_string: str) -> str: - """Takes a manifest json string and converts it to a csv file - - Args: - manifest_json_string (str): The manifest in json string form - - Returns: - str: The path of the csv file - """ - temp_dir = tempfile.gettempdir() - temp_path = os.path.join(temp_dir, "manifest.csv") - json_dict = json.loads(manifest_json_string) - manifest_df = pd.DataFrame(json_dict) - manifest_df.to_csv(temp_path, encoding="utf-8", index=False) - return temp_path - - -def save_manifest_csv_string_as_csv(manifest_csv_string: bytes) -> str: - """Takes a manifest csv string and converts it to a csv file - - Args: - manifest_csv_string (bytes): The manifest in csv string form - - Returns: - str: The path of the csv file - """ - temp_dir = tempfile.gettempdir() - temp_path = os.path.join(temp_dir, "manifest.csv") - manifest_df = pd.read_csv(io.BytesIO(manifest_csv_string), sep=",") - manifest_df.to_csv(temp_path, encoding="utf-8", index=False) - return temp_path - - -def get_access_token() -> str | None: - """Get access token from header""" - bearer_token = None - # Check if the 
Authorization header is present - if "Authorization" in request.headers: - auth_header = request.headers["Authorization"] - - # Ensure the header starts with 'Bearer ' and retrieve the token - if auth_header.startswith("Bearer "): - bearer_token = auth_header.split(" ")[1] - return bearer_token - - -def handle_exceptions(endpoint_function: Callable) -> Callable: - """ - This is designed to be used as a decorator for endpoint functions. - The endpoint function is called in a try block, and then various - Synapse and Schematic exceptions are handled and returned as the - BasicError object. - - Args: - f (Callable): A function that calls the input function - """ - - def func(*args: Any, **kwargs: Any) -> tuple[Any | BasicError, int]: - try: - return endpoint_function(*args, **kwargs) - - except SynapseNoCredentialsError as error: - status = 401 - res = BasicError( - "Missing or invalid Synapse credentials error", status, str(error) - ) - return res, status - - except SynapseAuthenticationError as error: - status = 401 - res = BasicError("Forbidden Synapse access error", status, str(error)) - return res, status - - except AccessCredentialsError as error: - status = 403 - res = BasicError("Synapse entity access error", status, str(error)) - return res, status - - except InvalidSchemaURL as error: - status = 404 - res = BasicError("Invalid URL", status, str(error)) - return res, status - - except InvalidValueError as error: - status = 422 - res = BasicError("Invalid data", status, str(error)) - return res, status - - except Exception as error: # pylint: disable=broad-exception-caught - status = 500 - res = BasicError("Internal error", status, str(error)) - return res, status - - return func - - -class InvalidSchemaURL(Exception): - """Raised when a provided url for a schema is incorrect""" - - def __init__(self, message: str, url: str): - """ - Args: - message (str): The error message - url (str): The provided incorrect URL - """ - self.message = message - self.url = url - super().__init__(self.message) - - def __str__(self) -> str: - return f"{self.message}: {self.url}" - - -class InvalidValueError(Exception): - """Raised when a provided value for an endpoint is invalid""" - - def __init__(self, message: str, values: dict[str, Any]): - """ - Args: - message (str): The error message - values (dict[str, Any]): A dict where the argument names are keys and - argument values are values - """ - self.message = message - self.values = values - - super().__init__(self.message) - - def __str__(self) -> str: - return f"{self.message}: {self.values}" - - -def download_schema_file_as_jsonld(schema_url: str) -> str: - """Downloads a schema and saves it as temp file - - Args: - schema_url (str): The URL of the schema - - Raises: - InvalidSchemaURL: When the schema url doesn't exist or is badly formatted - - Returns: - str: The path fo the schema jsonld file - """ - try: - with urllib.request.urlopen(schema_url) as response: - with tempfile.NamedTemporaryFile( - delete=False, suffix=".model.jsonld" - ) as tmp_file: - shutil.copyfileobj(response, tmp_file) - return tmp_file.name - except ValueError as error: - # checks for specific ValueError where the url isn't correctly formatted - if str(error).startswith("unknown url type"): - raise InvalidSchemaURL( - "The provided URL is incorrectly formatted", schema_url - ) from error - # reraises the ValueError if it isn't the specific type above - else: - raise - except HTTPError as error: - raise InvalidSchemaURL( - "The provided URL could not be found", schema_url - 
) from error - - -def purge_synapse_cache( - store: SynapseStorage, - maximum_storage_allowed_cache_gb: float = 1, - minute_buffer: int = 15, -) -> None: - """ - Purge synapse cache if it exceeds a certain size. Default to 1GB. - Args: - maximum_storage_allowed_cache_gb (float): the maximum storage allowed - before purging cache. Default is 1 GB. - minute_buffer (int): All files created this amount of time or older will be deleted - """ - # try clearing the cache - # scan a directory and check size of files - if os.path.exists(store.root_synapse_cache): - maximum_storage_allowed_cache_bytes = maximum_storage_allowed_cache_gb * ( - 1024**3 - ) - dir_size_bytes = check_synapse_cache_size(directory=store.root_synapse_cache) - # Check if cache is bigger than the allowed size and if so delete all files in cache - # older than the buffer time - if dir_size_bytes >= maximum_storage_allowed_cache_bytes: - minutes_earlier = calculate_datetime(minute_buffer) - num_of_deleted_files = store.syn.cache.purge(before_date=minutes_earlier) - LOGGER.info( - f"{num_of_deleted_files} files have been deleted from {store.root_synapse_cache}" - ) - else: - # on AWS, OS takes around 14-17% of our ephemeral storage (20GiB) - # instead of guessing how much space that we left, print out .synapseCache here - LOGGER.info(f"the total size of .synapseCache is: {dir_size_bytes} bytes") - - -def check_synapse_cache_size(directory: str) -> float: - """use du --sh command to calculate size of the Synapse cache - - Args: - directory (str, optional): The Synapse cache directory - - Returns: - float: returns size of the Synapse directory in bytes - """ - # Note: this command might fail on windows user. - # But since this command is primarily for running on AWS, it is fine. - command = ["du", "-sh", directory] - output = subprocess.run(command, capture_output=True, check=False).stdout.decode( - "utf-8" - ) - - # Parsing the output to extract the directory size - size = output.split("\t")[0] - return calculate_byte_size(size) - - -def calculate_byte_size(size_string: str) -> int: - """ - Calculates the size in bytes of a size returned from the "du" command - - Args: - size_string (str): - The input must be a string such as 4B, or 1.2K. - Sizes up to GB allowed. - - Raises: - ValueError: When the input doesn't match the allowed paterns - - Returns: - int: The size in bytes - """ - if size_string.isnumeric() and int(size_string) == 0: - return 0 - - size_dict: dict[str, int] = {"B": 0, "K": 1, "M": 2, "G": 3} - - size_letter_string = "".join(size_dict.keys()) - int_size_match = re.match(f"^[0-9]+[{size_letter_string}]$", size_string) - float_size_match = re.match(f"^[0-9]+\.[0-9]+[{size_letter_string}]$", size_string) - if not (int_size_match or float_size_match): - LOGGER.error("Cannot recognize the file size unit") - raise ValueError("The size string doesn't match the allowed type:", size_string) - - size_letter = size_string[-1] - size = float(size_string[:-1]) - multiple = 1024 ** size_dict[size_letter] - byte_size: int = ceil(size * multiple) - return byte_size - - -def calculate_datetime( - minutes: int, input_date_time: Optional[datetime] = None -) -> datetime: - """ - Calculates the datetime x minutes before the input date time - If no datetime is given, the current datetime is used. - - Args: - minutes (int): How much time to subtract from the input date time. - input_date_time (Optional[datetime], optional): The datetime to start with. Defaults to None. 
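# ---------------------------------------------------------------------------
# Illustrative sanity checks (not part of the deleted sources) for the cache
# helpers in this module: check_synapse_cache_size() parses "du -sh" output of
# the form "<size>\t<path>", and calculate_byte_size() treats the B/K/M/G
# suffixes as powers of 1024, rounding up. The expected values below follow
# directly from that code; calculate_datetime() simply subtracts the given
# number of minutes.
from datetime import datetime, timedelta
from math import ceil

assert ceil(1.2 * 1024) == 1229          # calculate_byte_size("1.2K") -> 1229 bytes
assert ceil(4 * 1024**0) == 4            # calculate_byte_size("4B")   -> 4 bytes
assert 1 * 1024**3 == 1_073_741_824      # the default 1 GB purge threshold in bytes
assert datetime(2025, 1, 1, 12, 0) - timedelta(minutes=15) == datetime(2025, 1, 1, 11, 45)
# ---------------------------------------------------------------------------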
- - Returns: - datetime: The new datetime - """ - if input_date_time is None: - date_time = datetime.now() - else: - date_time = input_date_time - return date_time - timedelta(minutes=minutes) diff --git a/apps/schematic/api/schematic_api/controllers/versions_controller.py b/apps/schematic/api/schematic_api/controllers/versions_controller.py deleted file mode 100644 index e63e2f5c2..000000000 --- a/apps/schematic/api/schematic_api/controllers/versions_controller.py +++ /dev/null @@ -1,20 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -from schematic_api.models.basic_error import BasicError # noqa: E501 -from schematic_api import util -from schematic_api.controllers import versions_controller_impl - - -def get_schematic_version(): # noqa: E501 - """Gets the version of the schematic library currently used by the API - - Gets the version of the schematic library currently used by the API # noqa: E501 - - - :rtype: Union[str, Tuple[str, int], Tuple[str, int, Dict[str, str]] - """ - return versions_controller_impl.get_schematic_version() diff --git a/apps/schematic/api/schematic_api/controllers/versions_controller_impl.py b/apps/schematic/api/schematic_api/controllers/versions_controller_impl.py deleted file mode 100644 index 37981a078..000000000 --- a/apps/schematic/api/schematic_api/controllers/versions_controller_impl.py +++ /dev/null @@ -1,15 +0,0 @@ -"Version endpoint controllers" - -import importlib.metadata - -from schematic_api.controllers.utils import handle_exceptions - - -@handle_exceptions -def get_schematic_version() -> str: - """Gets the current schematic version - - Returns: - str: The current schematic version - """ - return importlib.metadata.version("schematicpy") diff --git a/apps/schematic/api/schematic_api/encoder.py b/apps/schematic/api/schematic_api/encoder.py deleted file mode 100644 index ea64c0bb3..000000000 --- a/apps/schematic/api/schematic_api/encoder.py +++ /dev/null @@ -1,20 +0,0 @@ -from connexion.apps.flask_app import FlaskJSONEncoder -import six - -from schematic_api.models.base_model_ import Model - - -class JSONEncoder(FlaskJSONEncoder): - include_nulls = False - - def default(self, o): - if isinstance(o, Model): - dikt = {} - for attr, _ in six.iteritems(o.openapi_types): - value = getattr(o, attr) - if value is None and not self.include_nulls: - continue - attr = o.attribute_map[attr] - dikt[attr] = value - return dikt - return FlaskJSONEncoder.default(self, o) diff --git a/apps/schematic/api/schematic_api/models/__init__.py b/apps/schematic/api/schematic_api/models/__init__.py deleted file mode 100644 index 42e106ed9..000000000 --- a/apps/schematic/api/schematic_api/models/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -# flake8: noqa -from __future__ import absolute_import - -# import models into model package -from schematic_api.models.asset_type import AssetType -from schematic_api.models.basic_error import BasicError -from schematic_api.models.component_requirement_array import ComponentRequirementArray -from schematic_api.models.component_requirement_graph import ComponentRequirementGraph -from schematic_api.models.component_requirement_subgraph import ( - ComponentRequirementSubgraph, -) -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api.models.connected_node_pair_array import ConnectedNodePairArray -from schematic_api.models.connected_node_pair_page import ConnectedNodePairPage -from 
schematic_api.models.connected_node_pair_page_all_of import ( - ConnectedNodePairPageAllOf, -) -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api.models.dataset_metadata_array import DatasetMetadataArray -from schematic_api.models.dataset_metadata_page import DatasetMetadataPage -from schematic_api.models.dataset_metadata_page_all_of import DatasetMetadataPageAllOf -from schematic_api.models.file_metadata import FileMetadata -from schematic_api.models.file_metadata_array import FileMetadataArray -from schematic_api.models.file_metadata_page import FileMetadataPage -from schematic_api.models.file_metadata_page_all_of import FileMetadataPageAllOf -from schematic_api.models.google_sheet_links import GoogleSheetLinks -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api.models.manifest_metadata_array import ManifestMetadataArray -from schematic_api.models.manifest_metadata_page import ManifestMetadataPage -from schematic_api.models.manifest_metadata_page_all_of import ManifestMetadataPageAllOf -from schematic_api.models.manifest_validation_result import ManifestValidationResult -from schematic_api.models.node import Node -from schematic_api.models.node_array import NodeArray -from schematic_api.models.node_page import NodePage -from schematic_api.models.node_page_all_of import NodePageAllOf -from schematic_api.models.node_property_array import NodePropertyArray -from schematic_api.models.page_metadata import PageMetadata -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api.models.project_metadata_array import ProjectMetadataArray -from schematic_api.models.project_metadata_page import ProjectMetadataPage -from schematic_api.models.project_metadata_page_all_of import ProjectMetadataPageAllOf -from schematic_api.models.validation_rule import ValidationRule -from schematic_api.models.validation_rule_array import ValidationRuleArray diff --git a/apps/schematic/api/schematic_api/models/asset_type.py b/apps/schematic/api/schematic_api/models/asset_type.py deleted file mode 100644 index e6760d3e9..000000000 --- a/apps/schematic/api/schematic_api/models/asset_type.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class AssetType(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - """ - allowed enum values - """ - SYNAPSE = "synapse" - - def __init__(self): # noqa: E501 - """AssetType - a model defined in OpenAPI""" - self.openapi_types = {} - - self.attribute_map = {} - - @classmethod - def from_dict(cls, dikt) -> "AssetType": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The AssetType of this AssetType. 
# noqa: E501 - :rtype: AssetType - """ - return util.deserialize_model(dikt, cls) diff --git a/apps/schematic/api/schematic_api/models/base_model_.py b/apps/schematic/api/schematic_api/models/base_model_.py deleted file mode 100644 index 1154d22c2..000000000 --- a/apps/schematic/api/schematic_api/models/base_model_.py +++ /dev/null @@ -1,73 +0,0 @@ -import pprint - -import six -import typing - -from schematic_api import util - -T = typing.TypeVar("T") - - -class Model(object): - # openapiTypes: The key is attribute name and the - # value is attribute type. - openapi_types: typing.Dict[str, type] = {} - - # attributeMap: The key is attribute name and the - # value is json key in definition. - attribute_map: typing.Dict[str, str] = {} - - @classmethod - def from_dict(cls: typing.Type[T], dikt) -> T: - """Returns the dict as a model""" - return util.deserialize_model(dikt, cls) - - def to_dict(self): - """Returns the model properties as a dict - - :rtype: dict - """ - result = {} - - for attr, _ in six.iteritems(self.openapi_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list( - map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) - ) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict( - map( - lambda item: ( - (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") - else item - ), - value.items(), - ) - ) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model - - :rtype: str - """ - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/apps/schematic/api/schematic_api/models/basic_error.py b/apps/schematic/api/schematic_api/models/basic_error.py deleted file mode 100644 index 537acad3f..000000000 --- a/apps/schematic/api/schematic_api/models/basic_error.py +++ /dev/null @@ -1,153 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class BasicError(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, title=None, status=None, detail=None, type=None): # noqa: E501 - """BasicError - a model defined in OpenAPI - - :param title: The title of this BasicError. # noqa: E501 - :type title: str - :param status: The status of this BasicError. # noqa: E501 - :type status: int - :param detail: The detail of this BasicError. # noqa: E501 - :type detail: str - :param type: The type of this BasicError. # noqa: E501 - :type type: str - """ - self.openapi_types = {"title": str, "status": int, "detail": str, "type": str} - - self.attribute_map = { - "title": "title", - "status": "status", - "detail": "detail", - "type": "type", - } - - self._title = title - self._status = status - self._detail = detail - self._type = type - - @classmethod - def from_dict(cls, dikt) -> "BasicError": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The BasicError of this BasicError. 
# noqa: E501 - :rtype: BasicError - """ - return util.deserialize_model(dikt, cls) - - @property - def title(self): - """Gets the title of this BasicError. - - A human readable documentation for the problem type # noqa: E501 - - :return: The title of this BasicError. - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this BasicError. - - A human readable documentation for the problem type # noqa: E501 - - :param title: The title of this BasicError. - :type title: str - """ - if title is None: - raise ValueError( - "Invalid value for `title`, must not be `None`" - ) # noqa: E501 - - self._title = title - - @property - def status(self): - """Gets the status of this BasicError. - - The HTTP status code # noqa: E501 - - :return: The status of this BasicError. - :rtype: int - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this BasicError. - - The HTTP status code # noqa: E501 - - :param status: The status of this BasicError. - :type status: int - """ - if status is None: - raise ValueError( - "Invalid value for `status`, must not be `None`" - ) # noqa: E501 - - self._status = status - - @property - def detail(self): - """Gets the detail of this BasicError. - - A human readable explanation specific to this occurrence of the problem # noqa: E501 - - :return: The detail of this BasicError. - :rtype: str - """ - return self._detail - - @detail.setter - def detail(self, detail): - """Sets the detail of this BasicError. - - A human readable explanation specific to this occurrence of the problem # noqa: E501 - - :param detail: The detail of this BasicError. - :type detail: str - """ - - self._detail = detail - - @property - def type(self): - """Gets the type of this BasicError. - - An absolute URI that identifies the problem type # noqa: E501 - - :return: The type of this BasicError. - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this BasicError. - - An absolute URI that identifies the problem type # noqa: E501 - - :param type: The type of this BasicError. - :type type: str - """ - - self._type = type diff --git a/apps/schematic/api/schematic_api/models/component_requirement_array.py b/apps/schematic/api/schematic_api/models/component_requirement_array.py deleted file mode 100644 index 9c3ca8778..000000000 --- a/apps/schematic/api/schematic_api/models/component_requirement_array.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ComponentRequirementArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, component_requirements_list=None): # noqa: E501 - """ComponentRequirementArray - a model defined in OpenAPI - - :param component_requirements_list: The component_requirements_list of this ComponentRequirementArray. 
# noqa: E501 - :type component_requirements_list: List[str] - """ - self.openapi_types = {"component_requirements_list": List[str]} - - self.attribute_map = { - "component_requirements_list": "componentRequirementsList" - } - - self._component_requirements_list = component_requirements_list - - @classmethod - def from_dict(cls, dikt) -> "ComponentRequirementArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ComponentRequirementArray of this ComponentRequirementArray. # noqa: E501 - :rtype: ComponentRequirementArray - """ - return util.deserialize_model(dikt, cls) - - @property - def component_requirements_list(self): - """Gets the component_requirements_list of this ComponentRequirementArray. - - - :return: The component_requirements_list of this ComponentRequirementArray. - :rtype: List[str] - """ - return self._component_requirements_list - - @component_requirements_list.setter - def component_requirements_list(self, component_requirements_list): - """Sets the component_requirements_list of this ComponentRequirementArray. - - - :param component_requirements_list: The component_requirements_list of this ComponentRequirementArray. - :type component_requirements_list: List[str] - """ - - self._component_requirements_list = component_requirements_list diff --git a/apps/schematic/api/schematic_api/models/component_requirement_graph.py b/apps/schematic/api/schematic_api/models/component_requirement_graph.py deleted file mode 100644 index c500a4b0e..000000000 --- a/apps/schematic/api/schematic_api/models/component_requirement_graph.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.component_requirement_subgraph import ( - ComponentRequirementSubgraph, -) -from schematic_api import util - -from schematic_api.models.component_requirement_subgraph import ( - ComponentRequirementSubgraph, -) # noqa: E501 - - -class ComponentRequirementGraph(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, component_requirements_graph=None): # noqa: E501 - """ComponentRequirementGraph - a model defined in OpenAPI - - :param component_requirements_graph: The component_requirements_graph of this ComponentRequirementGraph. # noqa: E501 - :type component_requirements_graph: List[ComponentRequirementSubgraph] - """ - self.openapi_types = { - "component_requirements_graph": List[ComponentRequirementSubgraph] - } - - self.attribute_map = { - "component_requirements_graph": "componentRequirementsGraph" - } - - self._component_requirements_graph = component_requirements_graph - - @classmethod - def from_dict(cls, dikt) -> "ComponentRequirementGraph": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ComponentRequirementGraph of this ComponentRequirementGraph. # noqa: E501 - :rtype: ComponentRequirementGraph - """ - return util.deserialize_model(dikt, cls) - - @property - def component_requirements_graph(self): - """Gets the component_requirements_graph of this ComponentRequirementGraph. - - - :return: The component_requirements_graph of this ComponentRequirementGraph. 
- :rtype: List[ComponentRequirementSubgraph] - """ - return self._component_requirements_graph - - @component_requirements_graph.setter - def component_requirements_graph(self, component_requirements_graph): - """Sets the component_requirements_graph of this ComponentRequirementGraph. - - - :param component_requirements_graph: The component_requirements_graph of this ComponentRequirementGraph. - :type component_requirements_graph: List[ComponentRequirementSubgraph] - """ - - self._component_requirements_graph = component_requirements_graph diff --git a/apps/schematic/api/schematic_api/models/component_requirement_subgraph.py b/apps/schematic/api/schematic_api/models/component_requirement_subgraph.py deleted file mode 100644 index c636ac683..000000000 --- a/apps/schematic/api/schematic_api/models/component_requirement_subgraph.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ComponentRequirementSubgraph(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, component1=None, component2=None): # noqa: E501 - """ComponentRequirementSubgraph - a model defined in OpenAPI - - :param component1: The component1 of this ComponentRequirementSubgraph. # noqa: E501 - :type component1: str - :param component2: The component2 of this ComponentRequirementSubgraph. # noqa: E501 - :type component2: str - """ - self.openapi_types = {"component1": str, "component2": str} - - self.attribute_map = {"component1": "component1", "component2": "component2"} - - self._component1 = component1 - self._component2 = component2 - - @classmethod - def from_dict(cls, dikt) -> "ComponentRequirementSubgraph": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ComponentRequirementSubgraph of this ComponentRequirementSubgraph. # noqa: E501 - :rtype: ComponentRequirementSubgraph - """ - return util.deserialize_model(dikt, cls) - - @property - def component1(self): - """Gets the component1 of this ComponentRequirementSubgraph. - - The display name of the first component in the graph # noqa: E501 - - :return: The component1 of this ComponentRequirementSubgraph. - :rtype: str - """ - return self._component1 - - @component1.setter - def component1(self, component1): - """Sets the component1 of this ComponentRequirementSubgraph. - - The display name of the first component in the graph # noqa: E501 - - :param component1: The component1 of this ComponentRequirementSubgraph. - :type component1: str - """ - if component1 is None: - raise ValueError( - "Invalid value for `component1`, must not be `None`" - ) # noqa: E501 - - self._component1 = component1 - - @property - def component2(self): - """Gets the component2 of this ComponentRequirementSubgraph. - - The display name of the second component in the graph # noqa: E501 - - :return: The component2 of this ComponentRequirementSubgraph. - :rtype: str - """ - return self._component2 - - @component2.setter - def component2(self, component2): - """Sets the component2 of this ComponentRequirementSubgraph. - - The display name of the second component in the graph # noqa: E501 - - :param component2: The component2 of this ComponentRequirementSubgraph. 
- :type component2: str - """ - if component2 is None: - raise ValueError( - "Invalid value for `component2`, must not be `None`" - ) # noqa: E501 - - self._component2 = component2 diff --git a/apps/schematic/api/schematic_api/models/connected_node_pair.py b/apps/schematic/api/schematic_api/models/connected_node_pair.py deleted file mode 100644 index 15b45cd41..000000000 --- a/apps/schematic/api/schematic_api/models/connected_node_pair.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ConnectedNodePair(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, node1=None, node2=None): # noqa: E501 - """ConnectedNodePair - a model defined in OpenAPI - - :param node1: The node1 of this ConnectedNodePair. # noqa: E501 - :type node1: str - :param node2: The node2 of this ConnectedNodePair. # noqa: E501 - :type node2: str - """ - self.openapi_types = {"node1": str, "node2": str} - - self.attribute_map = {"node1": "node1", "node2": "node2"} - - self._node1 = node1 - self._node2 = node2 - - @classmethod - def from_dict(cls, dikt) -> "ConnectedNodePair": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ConnectedNodePair of this ConnectedNodePair. # noqa: E501 - :rtype: ConnectedNodePair - """ - return util.deserialize_model(dikt, cls) - - @property - def node1(self): - """Gets the node1 of this ConnectedNodePair. - - The disaplay name of the first node. # noqa: E501 - - :return: The node1 of this ConnectedNodePair. - :rtype: str - """ - return self._node1 - - @node1.setter - def node1(self, node1): - """Sets the node1 of this ConnectedNodePair. - - The disaplay name of the first node. # noqa: E501 - - :param node1: The node1 of this ConnectedNodePair. - :type node1: str - """ - if node1 is None: - raise ValueError( - "Invalid value for `node1`, must not be `None`" - ) # noqa: E501 - - self._node1 = node1 - - @property - def node2(self): - """Gets the node2 of this ConnectedNodePair. - - The display name of the second node. # noqa: E501 - - :return: The node2 of this ConnectedNodePair. - :rtype: str - """ - return self._node2 - - @node2.setter - def node2(self, node2): - """Sets the node2 of this ConnectedNodePair. - - The display name of the second node. # noqa: E501 - - :param node2: The node2 of this ConnectedNodePair. 
- :type node2: str - """ - if node2 is None: - raise ValueError( - "Invalid value for `node2`, must not be `None`" - ) # noqa: E501 - - self._node2 = node2 diff --git a/apps/schematic/api/schematic_api/models/connected_node_pair_array.py b/apps/schematic/api/schematic_api/models/connected_node_pair_array.py deleted file mode 100644 index 848488786..000000000 --- a/apps/schematic/api/schematic_api/models/connected_node_pair_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api import util - -from schematic_api.models.connected_node_pair import ConnectedNodePair # noqa: E501 - - -class ConnectedNodePairArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, connected_nodes=None): # noqa: E501 - """ConnectedNodePairArray - a model defined in OpenAPI - - :param connected_nodes: The connected_nodes of this ConnectedNodePairArray. # noqa: E501 - :type connected_nodes: List[ConnectedNodePair] - """ - self.openapi_types = {"connected_nodes": List[ConnectedNodePair]} - - self.attribute_map = {"connected_nodes": "connectedNodes"} - - self._connected_nodes = connected_nodes - - @classmethod - def from_dict(cls, dikt) -> "ConnectedNodePairArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ConnectedNodePairArray of this ConnectedNodePairArray. # noqa: E501 - :rtype: ConnectedNodePairArray - """ - return util.deserialize_model(dikt, cls) - - @property - def connected_nodes(self): - """Gets the connected_nodes of this ConnectedNodePairArray. - - An array of conncted node pairs. # noqa: E501 - - :return: The connected_nodes of this ConnectedNodePairArray. - :rtype: List[ConnectedNodePair] - """ - return self._connected_nodes - - @connected_nodes.setter - def connected_nodes(self, connected_nodes): - """Sets the connected_nodes of this ConnectedNodePairArray. - - An array of conncted node pairs. # noqa: E501 - - :param connected_nodes: The connected_nodes of this ConnectedNodePairArray. - :type connected_nodes: List[ConnectedNodePair] - """ - - self._connected_nodes = connected_nodes diff --git a/apps/schematic/api/schematic_api/models/connected_node_pair_page.py b/apps/schematic/api/schematic_api/models/connected_node_pair_page.py deleted file mode 100644 index 5ca83e488..000000000 --- a/apps/schematic/api/schematic_api/models/connected_node_pair_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api import util - -from schematic_api.models.connected_node_pair import ConnectedNodePair # noqa: E501 - - -class ConnectedNodePairPage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - connected_nodes=None, - ): # noqa: E501 - """ConnectedNodePairPage - a model defined in OpenAPI - - :param number: The number of this ConnectedNodePairPage. # noqa: E501 - :type number: int - :param size: The size of this ConnectedNodePairPage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this ConnectedNodePairPage. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this ConnectedNodePairPage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this ConnectedNodePairPage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this ConnectedNodePairPage. # noqa: E501 - :type has_previous: bool - :param connected_nodes: The connected_nodes of this ConnectedNodePairPage. # noqa: E501 - :type connected_nodes: List[ConnectedNodePair] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "connected_nodes": List[ConnectedNodePair], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "connected_nodes": "connectedNodes", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._connected_nodes = connected_nodes - - @classmethod - def from_dict(cls, dikt) -> "ConnectedNodePairPage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ConnectedNodePairPage of this ConnectedNodePairPage. # noqa: E501 - :rtype: ConnectedNodePairPage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this ConnectedNodePairPage. - - The page number. # noqa: E501 - - :return: The number of this ConnectedNodePairPage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this ConnectedNodePairPage. - - The page number. # noqa: E501 - - :param number: The number of this ConnectedNodePairPage. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this ConnectedNodePairPage. - - The number of items in a single page. # noqa: E501 - - :return: The size of this ConnectedNodePairPage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this ConnectedNodePairPage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this ConnectedNodePairPage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this ConnectedNodePairPage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this ConnectedNodePairPage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this ConnectedNodePairPage. 
- - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this ConnectedNodePairPage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this ConnectedNodePairPage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this ConnectedNodePairPage. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this ConnectedNodePairPage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this ConnectedNodePairPage. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this ConnectedNodePairPage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this ConnectedNodePairPage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this ConnectedNodePairPage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this ConnectedNodePairPage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this ConnectedNodePairPage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this ConnectedNodePairPage. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this ConnectedNodePairPage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this ConnectedNodePairPage. - :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def connected_nodes(self): - """Gets the connected_nodes of this ConnectedNodePairPage. - - An array of conncted node pairs. # noqa: E501 - - :return: The connected_nodes of this ConnectedNodePairPage. - :rtype: List[ConnectedNodePair] - """ - return self._connected_nodes - - @connected_nodes.setter - def connected_nodes(self, connected_nodes): - """Sets the connected_nodes of this ConnectedNodePairPage. - - An array of conncted node pairs. # noqa: E501 - - :param connected_nodes: The connected_nodes of this ConnectedNodePairPage. 
- :type connected_nodes: List[ConnectedNodePair] - """ - if connected_nodes is None: - raise ValueError( - "Invalid value for `connected_nodes`, must not be `None`" - ) # noqa: E501 - - self._connected_nodes = connected_nodes diff --git a/apps/schematic/api/schematic_api/models/connected_node_pair_page_all_of.py b/apps/schematic/api/schematic_api/models/connected_node_pair_page_all_of.py deleted file mode 100644 index 20126a9ae..000000000 --- a/apps/schematic/api/schematic_api/models/connected_node_pair_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api import util - -from schematic_api.models.connected_node_pair import ConnectedNodePair # noqa: E501 - - -class ConnectedNodePairPageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, connected_nodes=None): # noqa: E501 - """ConnectedNodePairPageAllOf - a model defined in OpenAPI - - :param connected_nodes: The connected_nodes of this ConnectedNodePairPageAllOf. # noqa: E501 - :type connected_nodes: List[ConnectedNodePair] - """ - self.openapi_types = {"connected_nodes": List[ConnectedNodePair]} - - self.attribute_map = {"connected_nodes": "connectedNodes"} - - self._connected_nodes = connected_nodes - - @classmethod - def from_dict(cls, dikt) -> "ConnectedNodePairPageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ConnectedNodePairPage_allOf of this ConnectedNodePairPageAllOf. # noqa: E501 - :rtype: ConnectedNodePairPageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def connected_nodes(self): - """Gets the connected_nodes of this ConnectedNodePairPageAllOf. - - An array of conncted node pairs. # noqa: E501 - - :return: The connected_nodes of this ConnectedNodePairPageAllOf. - :rtype: List[ConnectedNodePair] - """ - return self._connected_nodes - - @connected_nodes.setter - def connected_nodes(self, connected_nodes): - """Sets the connected_nodes of this ConnectedNodePairPageAllOf. - - An array of conncted node pairs. # noqa: E501 - - :param connected_nodes: The connected_nodes of this ConnectedNodePairPageAllOf. - :type connected_nodes: List[ConnectedNodePair] - """ - if connected_nodes is None: - raise ValueError( - "Invalid value for `connected_nodes`, must not be `None`" - ) # noqa: E501 - - self._connected_nodes = connected_nodes diff --git a/apps/schematic/api/schematic_api/models/dataset_metadata.py b/apps/schematic/api/schematic_api/models/dataset_metadata.py deleted file mode 100644 index a56e39d0c..000000000 --- a/apps/schematic/api/schematic_api/models/dataset_metadata.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class DatasetMetadata(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
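# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the deleted sources): every generated model
# in this package inherits from Model (base_model_.py above), so instances can
# be serialized with to_dict() and rebuilt with from_dict(). Using BasicError,
# whose definition appears earlier in this diff:
#
#   error = BasicError(title="Internal error", status=500, detail="boom")
#   error.to_dict()
#   # -> {"title": "Internal error", "status": 500, "detail": "boom", "type": None}
#   BasicError.from_dict(error.to_dict())
#   # rebuilds an equivalent instance via util.deserialize_model
#
# Note that to_dict() keys use the Python attribute names from openapi_types;
# JSONEncoder (encoder.py above) is what maps them through attribute_map and
# drops null values when models are JSON-encoded.
# ---------------------------------------------------------------------------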
- """ - - def __init__(self, name=None, id=None): # noqa: E501 - """DatasetMetadata - a model defined in OpenAPI - - :param name: The name of this DatasetMetadata. # noqa: E501 - :type name: str - :param id: The id of this DatasetMetadata. # noqa: E501 - :type id: str - """ - self.openapi_types = {"name": str, "id": str} - - self.attribute_map = {"name": "name", "id": "id"} - - self._name = name - self._id = id - - @classmethod - def from_dict(cls, dikt) -> "DatasetMetadata": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The DatasetMetadata of this DatasetMetadata. # noqa: E501 - :rtype: DatasetMetadata - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this DatasetMetadata. - - The name of the dataset. # noqa: E501 - - :return: The name of this DatasetMetadata. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this DatasetMetadata. - - The name of the dataset. # noqa: E501 - - :param name: The name of this DatasetMetadata. - :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name - - @property - def id(self): - """Gets the id of this DatasetMetadata. - - The ID of the dataset. # noqa: E501 - - :return: The id of this DatasetMetadata. - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this DatasetMetadata. - - The ID of the dataset. # noqa: E501 - - :param id: The id of this DatasetMetadata. - :type id: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id diff --git a/apps/schematic/api/schematic_api/models/dataset_metadata_array.py b/apps/schematic/api/schematic_api/models/dataset_metadata_array.py deleted file mode 100644 index 8ad256d14..000000000 --- a/apps/schematic/api/schematic_api/models/dataset_metadata_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api import util - -from schematic_api.models.dataset_metadata import DatasetMetadata # noqa: E501 - - -class DatasetMetadataArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, datasets=None): # noqa: E501 - """DatasetMetadataArray - a model defined in OpenAPI - - :param datasets: The datasets of this DatasetMetadataArray. # noqa: E501 - :type datasets: List[DatasetMetadata] - """ - self.openapi_types = {"datasets": List[DatasetMetadata]} - - self.attribute_map = {"datasets": "datasets"} - - self._datasets = datasets - - @classmethod - def from_dict(cls, dikt) -> "DatasetMetadataArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The DatasetMetadataArray of this DatasetMetadataArray. # noqa: E501 - :rtype: DatasetMetadataArray - """ - return util.deserialize_model(dikt, cls) - - @property - def datasets(self): - """Gets the datasets of this DatasetMetadataArray. - - An array of dataset meatdata. # noqa: E501 - - :return: The datasets of this DatasetMetadataArray. 
- :rtype: List[DatasetMetadata] - """ - return self._datasets - - @datasets.setter - def datasets(self, datasets): - """Sets the datasets of this DatasetMetadataArray. - - An array of dataset meatdata. # noqa: E501 - - :param datasets: The datasets of this DatasetMetadataArray. - :type datasets: List[DatasetMetadata] - """ - - self._datasets = datasets diff --git a/apps/schematic/api/schematic_api/models/dataset_metadata_page.py b/apps/schematic/api/schematic_api/models/dataset_metadata_page.py deleted file mode 100644 index 7160b55b2..000000000 --- a/apps/schematic/api/schematic_api/models/dataset_metadata_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api import util - -from schematic_api.models.dataset_metadata import DatasetMetadata # noqa: E501 - - -class DatasetMetadataPage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - datasets=None, - ): # noqa: E501 - """DatasetMetadataPage - a model defined in OpenAPI - - :param number: The number of this DatasetMetadataPage. # noqa: E501 - :type number: int - :param size: The size of this DatasetMetadataPage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this DatasetMetadataPage. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this DatasetMetadataPage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this DatasetMetadataPage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this DatasetMetadataPage. # noqa: E501 - :type has_previous: bool - :param datasets: The datasets of this DatasetMetadataPage. # noqa: E501 - :type datasets: List[DatasetMetadata] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "datasets": List[DatasetMetadata], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "datasets": "datasets", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._datasets = datasets - - @classmethod - def from_dict(cls, dikt) -> "DatasetMetadataPage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The DatasetMetadataPage of this DatasetMetadataPage. # noqa: E501 - :rtype: DatasetMetadataPage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this DatasetMetadataPage. - - The page number. # noqa: E501 - - :return: The number of this DatasetMetadataPage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this DatasetMetadataPage. - - The page number. # noqa: E501 - - :param number: The number of this DatasetMetadataPage. 
- :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this DatasetMetadataPage. - - The number of items in a single page. # noqa: E501 - - :return: The size of this DatasetMetadataPage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this DatasetMetadataPage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this DatasetMetadataPage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this DatasetMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this DatasetMetadataPage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this DatasetMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this DatasetMetadataPage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this DatasetMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this DatasetMetadataPage. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this DatasetMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this DatasetMetadataPage. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this DatasetMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this DatasetMetadataPage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this DatasetMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this DatasetMetadataPage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this DatasetMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this DatasetMetadataPage. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this DatasetMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this DatasetMetadataPage. 
- :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def datasets(self): - """Gets the datasets of this DatasetMetadataPage. - - An array of dataset meatdata. # noqa: E501 - - :return: The datasets of this DatasetMetadataPage. - :rtype: List[DatasetMetadata] - """ - return self._datasets - - @datasets.setter - def datasets(self, datasets): - """Sets the datasets of this DatasetMetadataPage. - - An array of dataset meatdata. # noqa: E501 - - :param datasets: The datasets of this DatasetMetadataPage. - :type datasets: List[DatasetMetadata] - """ - if datasets is None: - raise ValueError( - "Invalid value for `datasets`, must not be `None`" - ) # noqa: E501 - - self._datasets = datasets diff --git a/apps/schematic/api/schematic_api/models/dataset_metadata_page_all_of.py b/apps/schematic/api/schematic_api/models/dataset_metadata_page_all_of.py deleted file mode 100644 index 7a606531c..000000000 --- a/apps/schematic/api/schematic_api/models/dataset_metadata_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api import util - -from schematic_api.models.dataset_metadata import DatasetMetadata # noqa: E501 - - -class DatasetMetadataPageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, datasets=None): # noqa: E501 - """DatasetMetadataPageAllOf - a model defined in OpenAPI - - :param datasets: The datasets of this DatasetMetadataPageAllOf. # noqa: E501 - :type datasets: List[DatasetMetadata] - """ - self.openapi_types = {"datasets": List[DatasetMetadata]} - - self.attribute_map = {"datasets": "datasets"} - - self._datasets = datasets - - @classmethod - def from_dict(cls, dikt) -> "DatasetMetadataPageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The DatasetMetadataPage_allOf of this DatasetMetadataPageAllOf. # noqa: E501 - :rtype: DatasetMetadataPageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def datasets(self): - """Gets the datasets of this DatasetMetadataPageAllOf. - - An array of dataset meatdata. # noqa: E501 - - :return: The datasets of this DatasetMetadataPageAllOf. - :rtype: List[DatasetMetadata] - """ - return self._datasets - - @datasets.setter - def datasets(self, datasets): - """Sets the datasets of this DatasetMetadataPageAllOf. - - An array of dataset meatdata. # noqa: E501 - - :param datasets: The datasets of this DatasetMetadataPageAllOf. 
- :type datasets: List[DatasetMetadata] - """ - if datasets is None: - raise ValueError( - "Invalid value for `datasets`, must not be `None`" - ) # noqa: E501 - - self._datasets = datasets diff --git a/apps/schematic/api/schematic_api/models/file_metadata.py b/apps/schematic/api/schematic_api/models/file_metadata.py deleted file mode 100644 index 7c61eef4b..000000000 --- a/apps/schematic/api/schematic_api/models/file_metadata.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class FileMetadata(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name=None, id=None): # noqa: E501 - """FileMetadata - a model defined in OpenAPI - - :param name: The name of this FileMetadata. # noqa: E501 - :type name: str - :param id: The id of this FileMetadata. # noqa: E501 - :type id: str - """ - self.openapi_types = {"name": str, "id": str} - - self.attribute_map = {"name": "name", "id": "id"} - - self._name = name - self._id = id - - @classmethod - def from_dict(cls, dikt) -> "FileMetadata": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The FileMetadata of this FileMetadata. # noqa: E501 - :rtype: FileMetadata - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this FileMetadata. - - The name of the file. # noqa: E501 - - :return: The name of this FileMetadata. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this FileMetadata. - - The name of the file. # noqa: E501 - - :param name: The name of this FileMetadata. - :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name - - @property - def id(self): - """Gets the id of this FileMetadata. - - The ID of the file. # noqa: E501 - - :return: The id of this FileMetadata. - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this FileMetadata. - - The ID of the file. # noqa: E501 - - :param id: The id of this FileMetadata. - :type id: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id diff --git a/apps/schematic/api/schematic_api/models/file_metadata_array.py b/apps/schematic/api/schematic_api/models/file_metadata_array.py deleted file mode 100644 index 53a4689b1..000000000 --- a/apps/schematic/api/schematic_api/models/file_metadata_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.file_metadata import FileMetadata -from schematic_api import util - -from schematic_api.models.file_metadata import FileMetadata # noqa: E501 - - -class FileMetadataArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, files=None): # noqa: E501 - """FileMetadataArray - a model defined in OpenAPI - - :param files: The files of this FileMetadataArray. 
# noqa: E501 - :type files: List[FileMetadata] - """ - self.openapi_types = {"files": List[FileMetadata]} - - self.attribute_map = {"files": "files"} - - self._files = files - - @classmethod - def from_dict(cls, dikt) -> "FileMetadataArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The FileMetadataArray of this FileMetadataArray. # noqa: E501 - :rtype: FileMetadataArray - """ - return util.deserialize_model(dikt, cls) - - @property - def files(self): - """Gets the files of this FileMetadataArray. - - A list of file metadata. # noqa: E501 - - :return: The files of this FileMetadataArray. - :rtype: List[FileMetadata] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this FileMetadataArray. - - A list of file metadata. # noqa: E501 - - :param files: The files of this FileMetadataArray. - :type files: List[FileMetadata] - """ - - self._files = files diff --git a/apps/schematic/api/schematic_api/models/file_metadata_page.py b/apps/schematic/api/schematic_api/models/file_metadata_page.py deleted file mode 100644 index 0abd10edf..000000000 --- a/apps/schematic/api/schematic_api/models/file_metadata_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.file_metadata import FileMetadata -from schematic_api import util - -from schematic_api.models.file_metadata import FileMetadata # noqa: E501 - - -class FileMetadataPage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - files=None, - ): # noqa: E501 - """FileMetadataPage - a model defined in OpenAPI - - :param number: The number of this FileMetadataPage. # noqa: E501 - :type number: int - :param size: The size of this FileMetadataPage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this FileMetadataPage. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this FileMetadataPage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this FileMetadataPage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this FileMetadataPage. # noqa: E501 - :type has_previous: bool - :param files: The files of this FileMetadataPage. # noqa: E501 - :type files: List[FileMetadata] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "files": List[FileMetadata], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "files": "files", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._files = files - - @classmethod - def from_dict(cls, dikt) -> "FileMetadataPage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The FileMetadataPage of this FileMetadataPage. 
# noqa: E501 - :rtype: FileMetadataPage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this FileMetadataPage. - - The page number. # noqa: E501 - - :return: The number of this FileMetadataPage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this FileMetadataPage. - - The page number. # noqa: E501 - - :param number: The number of this FileMetadataPage. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this FileMetadataPage. - - The number of items in a single page. # noqa: E501 - - :return: The size of this FileMetadataPage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this FileMetadataPage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this FileMetadataPage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this FileMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this FileMetadataPage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this FileMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this FileMetadataPage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this FileMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this FileMetadataPage. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this FileMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this FileMetadataPage. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this FileMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this FileMetadataPage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this FileMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this FileMetadataPage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this FileMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this FileMetadataPage. 
- :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this FileMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this FileMetadataPage. - :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def files(self): - """Gets the files of this FileMetadataPage. - - A list of file metadata. # noqa: E501 - - :return: The files of this FileMetadataPage. - :rtype: List[FileMetadata] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this FileMetadataPage. - - A list of file metadata. # noqa: E501 - - :param files: The files of this FileMetadataPage. - :type files: List[FileMetadata] - """ - if files is None: - raise ValueError( - "Invalid value for `files`, must not be `None`" - ) # noqa: E501 - - self._files = files diff --git a/apps/schematic/api/schematic_api/models/file_metadata_page_all_of.py b/apps/schematic/api/schematic_api/models/file_metadata_page_all_of.py deleted file mode 100644 index ee46de24f..000000000 --- a/apps/schematic/api/schematic_api/models/file_metadata_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.file_metadata import FileMetadata -from schematic_api import util - -from schematic_api.models.file_metadata import FileMetadata # noqa: E501 - - -class FileMetadataPageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, files=None): # noqa: E501 - """FileMetadataPageAllOf - a model defined in OpenAPI - - :param files: The files of this FileMetadataPageAllOf. # noqa: E501 - :type files: List[FileMetadata] - """ - self.openapi_types = {"files": List[FileMetadata]} - - self.attribute_map = {"files": "files"} - - self._files = files - - @classmethod - def from_dict(cls, dikt) -> "FileMetadataPageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The FileMetadataPage_allOf of this FileMetadataPageAllOf. # noqa: E501 - :rtype: FileMetadataPageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def files(self): - """Gets the files of this FileMetadataPageAllOf. - - A list of file metadata. # noqa: E501 - - :return: The files of this FileMetadataPageAllOf. - :rtype: List[FileMetadata] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this FileMetadataPageAllOf. - - A list of file metadata. # noqa: E501 - - :param files: The files of this FileMetadataPageAllOf. 
- :type files: List[FileMetadata] - """ - if files is None: - raise ValueError( - "Invalid value for `files`, must not be `None`" - ) # noqa: E501 - - self._files = files diff --git a/apps/schematic/api/schematic_api/models/google_sheet_links.py b/apps/schematic/api/schematic_api/models/google_sheet_links.py deleted file mode 100644 index 9207024fa..000000000 --- a/apps/schematic/api/schematic_api/models/google_sheet_links.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class GoogleSheetLinks(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, links=None): # noqa: E501 - """GoogleSheetLinks - a model defined in OpenAPI - - :param links: The links of this GoogleSheetLinks. # noqa: E501 - :type links: List[str] - """ - self.openapi_types = {"links": List[str]} - - self.attribute_map = {"links": "links"} - - self._links = links - - @classmethod - def from_dict(cls, dikt) -> "GoogleSheetLinks": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The GoogleSheetLinks of this GoogleSheetLinks. # noqa: E501 - :rtype: GoogleSheetLinks - """ - return util.deserialize_model(dikt, cls) - - @property - def links(self): - """Gets the links of this GoogleSheetLinks. - - - :return: The links of this GoogleSheetLinks. - :rtype: List[str] - """ - return self._links - - @links.setter - def links(self, links): - """Sets the links of this GoogleSheetLinks. - - - :param links: The links of this GoogleSheetLinks. - :type links: List[str] - """ - - self._links = links diff --git a/apps/schematic/api/schematic_api/models/manifest_metadata.py b/apps/schematic/api/schematic_api/models/manifest_metadata.py deleted file mode 100644 index 90b8eb2cb..000000000 --- a/apps/schematic/api/schematic_api/models/manifest_metadata.py +++ /dev/null @@ -1,191 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ManifestMetadata(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - name=None, - id=None, - dataset_name=None, - dataset_id=None, - component_name=None, - ): # noqa: E501 - """ManifestMetadata - a model defined in OpenAPI - - :param name: The name of this ManifestMetadata. # noqa: E501 - :type name: str - :param id: The id of this ManifestMetadata. # noqa: E501 - :type id: str - :param dataset_name: The dataset_name of this ManifestMetadata. # noqa: E501 - :type dataset_name: str - :param dataset_id: The dataset_id of this ManifestMetadata. # noqa: E501 - :type dataset_id: str - :param component_name: The component_name of this ManifestMetadata. 
# noqa: E501 - :type component_name: str - """ - self.openapi_types = { - "name": str, - "id": str, - "dataset_name": str, - "dataset_id": str, - "component_name": str, - } - - self.attribute_map = { - "name": "name", - "id": "id", - "dataset_name": "datasetName", - "dataset_id": "datasetId", - "component_name": "componentName", - } - - self._name = name - self._id = id - self._dataset_name = dataset_name - self._dataset_id = dataset_id - self._component_name = component_name - - @classmethod - def from_dict(cls, dikt) -> "ManifestMetadata": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ManifestMetadata of this ManifestMetadata. # noqa: E501 - :rtype: ManifestMetadata - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this ManifestMetadata. - - The name of the manifest file. # noqa: E501 - - :return: The name of this ManifestMetadata. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this ManifestMetadata. - - The name of the manifest file. # noqa: E501 - - :param name: The name of this ManifestMetadata. - :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name - - @property - def id(self): - """Gets the id of this ManifestMetadata. - - The id of the manifest file. # noqa: E501 - - :return: The id of this ManifestMetadata. - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ManifestMetadata. - - The id of the manifest file. # noqa: E501 - - :param id: The id of this ManifestMetadata. - :type id: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - @property - def dataset_name(self): - """Gets the dataset_name of this ManifestMetadata. - - The name of the dataset the manifest belongs to. # noqa: E501 - - :return: The dataset_name of this ManifestMetadata. - :rtype: str - """ - return self._dataset_name - - @dataset_name.setter - def dataset_name(self, dataset_name): - """Sets the dataset_name of this ManifestMetadata. - - The name of the dataset the manifest belongs to. # noqa: E501 - - :param dataset_name: The dataset_name of this ManifestMetadata. - :type dataset_name: str - """ - - self._dataset_name = dataset_name - - @property - def dataset_id(self): - """Gets the dataset_id of this ManifestMetadata. - - The id of the dataset the manifest belongs to. # noqa: E501 - - :return: The dataset_id of this ManifestMetadata. - :rtype: str - """ - return self._dataset_id - - @dataset_id.setter - def dataset_id(self, dataset_id): - """Sets the dataset_id of this ManifestMetadata. - - The id of the dataset the manifest belongs to. # noqa: E501 - - :param dataset_id: The dataset_id of this ManifestMetadata. - :type dataset_id: str - """ - - self._dataset_id = dataset_id - - @property - def component_name(self): - """Gets the component_name of this ManifestMetadata. - - The name of the component the manifest is of. # noqa: E501 - - :return: The component_name of this ManifestMetadata. - :rtype: str - """ - return self._component_name - - @component_name.setter - def component_name(self, component_name): - """Sets the component_name of this ManifestMetadata. - - The name of the component the manifest is of. # noqa: E501 - - :param component_name: The component_name of this ManifestMetadata. 
- :type component_name: str - """ - - self._component_name = component_name diff --git a/apps/schematic/api/schematic_api/models/manifest_metadata_array.py b/apps/schematic/api/schematic_api/models/manifest_metadata_array.py deleted file mode 100644 index 6e6485ec8..000000000 --- a/apps/schematic/api/schematic_api/models/manifest_metadata_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api import util - -from schematic_api.models.manifest_metadata import ManifestMetadata # noqa: E501 - - -class ManifestMetadataArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, manifests=None): # noqa: E501 - """ManifestMetadataArray - a model defined in OpenAPI - - :param manifests: The manifests of this ManifestMetadataArray. # noqa: E501 - :type manifests: List[ManifestMetadata] - """ - self.openapi_types = {"manifests": List[ManifestMetadata]} - - self.attribute_map = {"manifests": "manifests"} - - self._manifests = manifests - - @classmethod - def from_dict(cls, dikt) -> "ManifestMetadataArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ManifestMetadataArray of this ManifestMetadataArray. # noqa: E501 - :rtype: ManifestMetadataArray - """ - return util.deserialize_model(dikt, cls) - - @property - def manifests(self): - """Gets the manifests of this ManifestMetadataArray. - - A list of manifest metadata # noqa: E501 - - :return: The manifests of this ManifestMetadataArray. - :rtype: List[ManifestMetadata] - """ - return self._manifests - - @manifests.setter - def manifests(self, manifests): - """Sets the manifests of this ManifestMetadataArray. - - A list of manifest metadata # noqa: E501 - - :param manifests: The manifests of this ManifestMetadataArray. - :type manifests: List[ManifestMetadata] - """ - - self._manifests = manifests diff --git a/apps/schematic/api/schematic_api/models/manifest_metadata_page.py b/apps/schematic/api/schematic_api/models/manifest_metadata_page.py deleted file mode 100644 index b69da066c..000000000 --- a/apps/schematic/api/schematic_api/models/manifest_metadata_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api import util - -from schematic_api.models.manifest_metadata import ManifestMetadata # noqa: E501 - - -class ManifestMetadataPage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - manifests=None, - ): # noqa: E501 - """ManifestMetadataPage - a model defined in OpenAPI - - :param number: The number of this ManifestMetadataPage. # noqa: E501 - :type number: int - :param size: The size of this ManifestMetadataPage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this ManifestMetadataPage. 
# noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this ManifestMetadataPage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this ManifestMetadataPage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this ManifestMetadataPage. # noqa: E501 - :type has_previous: bool - :param manifests: The manifests of this ManifestMetadataPage. # noqa: E501 - :type manifests: List[ManifestMetadata] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "manifests": List[ManifestMetadata], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "manifests": "manifests", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._manifests = manifests - - @classmethod - def from_dict(cls, dikt) -> "ManifestMetadataPage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ManifestMetadataPage of this ManifestMetadataPage. # noqa: E501 - :rtype: ManifestMetadataPage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this ManifestMetadataPage. - - The page number. # noqa: E501 - - :return: The number of this ManifestMetadataPage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this ManifestMetadataPage. - - The page number. # noqa: E501 - - :param number: The number of this ManifestMetadataPage. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this ManifestMetadataPage. - - The number of items in a single page. # noqa: E501 - - :return: The size of this ManifestMetadataPage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this ManifestMetadataPage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this ManifestMetadataPage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this ManifestMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this ManifestMetadataPage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this ManifestMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this ManifestMetadataPage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this ManifestMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this ManifestMetadataPage. 
- :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this ManifestMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this ManifestMetadataPage. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this ManifestMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this ManifestMetadataPage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this ManifestMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this ManifestMetadataPage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this ManifestMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this ManifestMetadataPage. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this ManifestMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this ManifestMetadataPage. - :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def manifests(self): - """Gets the manifests of this ManifestMetadataPage. - - A list of manifest metadata # noqa: E501 - - :return: The manifests of this ManifestMetadataPage. - :rtype: List[ManifestMetadata] - """ - return self._manifests - - @manifests.setter - def manifests(self, manifests): - """Sets the manifests of this ManifestMetadataPage. - - A list of manifest metadata # noqa: E501 - - :param manifests: The manifests of this ManifestMetadataPage. - :type manifests: List[ManifestMetadata] - """ - if manifests is None: - raise ValueError( - "Invalid value for `manifests`, must not be `None`" - ) # noqa: E501 - - self._manifests = manifests diff --git a/apps/schematic/api/schematic_api/models/manifest_metadata_page_all_of.py b/apps/schematic/api/schematic_api/models/manifest_metadata_page_all_of.py deleted file mode 100644 index bdee4bf91..000000000 --- a/apps/schematic/api/schematic_api/models/manifest_metadata_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api import util - -from schematic_api.models.manifest_metadata import ManifestMetadata # noqa: E501 - - -class ManifestMetadataPageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- """ - - def __init__(self, manifests=None): # noqa: E501 - """ManifestMetadataPageAllOf - a model defined in OpenAPI - - :param manifests: The manifests of this ManifestMetadataPageAllOf. # noqa: E501 - :type manifests: List[ManifestMetadata] - """ - self.openapi_types = {"manifests": List[ManifestMetadata]} - - self.attribute_map = {"manifests": "manifests"} - - self._manifests = manifests - - @classmethod - def from_dict(cls, dikt) -> "ManifestMetadataPageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ManifestMetadataPage_allOf of this ManifestMetadataPageAllOf. # noqa: E501 - :rtype: ManifestMetadataPageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def manifests(self): - """Gets the manifests of this ManifestMetadataPageAllOf. - - A list of manifest metadata # noqa: E501 - - :return: The manifests of this ManifestMetadataPageAllOf. - :rtype: List[ManifestMetadata] - """ - return self._manifests - - @manifests.setter - def manifests(self, manifests): - """Sets the manifests of this ManifestMetadataPageAllOf. - - A list of manifest metadata # noqa: E501 - - :param manifests: The manifests of this ManifestMetadataPageAllOf. - :type manifests: List[ManifestMetadata] - """ - if manifests is None: - raise ValueError( - "Invalid value for `manifests`, must not be `None`" - ) # noqa: E501 - - self._manifests = manifests diff --git a/apps/schematic/api/schematic_api/models/manifest_validation_result.py b/apps/schematic/api/schematic_api/models/manifest_validation_result.py deleted file mode 100644 index 8de5d2fa8..000000000 --- a/apps/schematic/api/schematic_api/models/manifest_validation_result.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ManifestValidationResult(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, errors=None, warnings=None): # noqa: E501 - """ManifestValidationResult - a model defined in OpenAPI - - :param errors: The errors of this ManifestValidationResult. # noqa: E501 - :type errors: List[str] - :param warnings: The warnings of this ManifestValidationResult. # noqa: E501 - :type warnings: List[str] - """ - self.openapi_types = {"errors": List[str], "warnings": List[str]} - - self.attribute_map = {"errors": "errors", "warnings": "warnings"} - - self._errors = errors - self._warnings = warnings - - @classmethod - def from_dict(cls, dikt) -> "ManifestValidationResult": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ManifestValidationResult of this ManifestValidationResult. # noqa: E501 - :rtype: ManifestValidationResult - """ - return util.deserialize_model(dikt, cls) - - @property - def errors(self): - """Gets the errors of this ManifestValidationResult. - - Any errors from validation # noqa: E501 - - :return: The errors of this ManifestValidationResult. - :rtype: List[str] - """ - return self._errors - - @errors.setter - def errors(self, errors): - """Sets the errors of this ManifestValidationResult. - - Any errors from validation # noqa: E501 - - :param errors: The errors of this ManifestValidationResult. 
- :type errors: List[str] - """ - - self._errors = errors - - @property - def warnings(self): - """Gets the warnings of this ManifestValidationResult. - - Any warnings from validation # noqa: E501 - - :return: The warnings of this ManifestValidationResult. - :rtype: List[str] - """ - return self._warnings - - @warnings.setter - def warnings(self, warnings): - """Sets the warnings of this ManifestValidationResult. - - Any warnings from validation # noqa: E501 - - :param warnings: The warnings of this ManifestValidationResult. - :type warnings: List[str] - """ - - self._warnings = warnings diff --git a/apps/schematic/api/schematic_api/models/node.py b/apps/schematic/api/schematic_api/models/node.py deleted file mode 100644 index 50995632f..000000000 --- a/apps/schematic/api/schematic_api/models/node.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class Node(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name=None): # noqa: E501 - """Node - a model defined in OpenAPI - - :param name: The name of this Node. # noqa: E501 - :type name: str - """ - self.openapi_types = {"name": str} - - self.attribute_map = {"name": "name"} - - self._name = name - - @classmethod - def from_dict(cls, dikt) -> "Node": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The Node of this Node. # noqa: E501 - :rtype: Node - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this Node. - - The name of the node. # noqa: E501 - - :return: The name of this Node. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Node. - - The name of the node. # noqa: E501 - - :param name: The name of this Node. - :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name diff --git a/apps/schematic/api/schematic_api/models/node_array.py b/apps/schematic/api/schematic_api/models/node_array.py deleted file mode 100644 index d75f94ab2..000000000 --- a/apps/schematic/api/schematic_api/models/node_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.node import Node -from schematic_api import util - -from schematic_api.models.node import Node # noqa: E501 - - -class NodeArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, nodes=None): # noqa: E501 - """NodeArray - a model defined in OpenAPI - - :param nodes: The nodes of this NodeArray. # noqa: E501 - :type nodes: List[Node] - """ - self.openapi_types = {"nodes": List[Node]} - - self.attribute_map = {"nodes": "nodes"} - - self._nodes = nodes - - @classmethod - def from_dict(cls, dikt) -> "NodeArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The NodeArray of this NodeArray. 
# noqa: E501 - :rtype: NodeArray - """ - return util.deserialize_model(dikt, cls) - - @property - def nodes(self): - """Gets the nodes of this NodeArray. - - An array of nodes. # noqa: E501 - - :return: The nodes of this NodeArray. - :rtype: List[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this NodeArray. - - An array of nodes. # noqa: E501 - - :param nodes: The nodes of this NodeArray. - :type nodes: List[Node] - """ - - self._nodes = nodes diff --git a/apps/schematic/api/schematic_api/models/node_page.py b/apps/schematic/api/schematic_api/models/node_page.py deleted file mode 100644 index 05aa7aee6..000000000 --- a/apps/schematic/api/schematic_api/models/node_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.node import Node -from schematic_api import util - -from schematic_api.models.node import Node # noqa: E501 - - -class NodePage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - nodes=None, - ): # noqa: E501 - """NodePage - a model defined in OpenAPI - - :param number: The number of this NodePage. # noqa: E501 - :type number: int - :param size: The size of this NodePage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this NodePage. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this NodePage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this NodePage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this NodePage. # noqa: E501 - :type has_previous: bool - :param nodes: The nodes of this NodePage. # noqa: E501 - :type nodes: List[Node] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "nodes": List[Node], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "nodes": "nodes", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._nodes = nodes - - @classmethod - def from_dict(cls, dikt) -> "NodePage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The NodePage of this NodePage. # noqa: E501 - :rtype: NodePage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this NodePage. - - The page number. # noqa: E501 - - :return: The number of this NodePage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this NodePage. - - The page number. # noqa: E501 - - :param number: The number of this NodePage. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this NodePage. 
- - The number of items in a single page. # noqa: E501 - - :return: The size of this NodePage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this NodePage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this NodePage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this NodePage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this NodePage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this NodePage. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this NodePage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this NodePage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this NodePage. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this NodePage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this NodePage. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this NodePage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this NodePage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this NodePage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this NodePage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this NodePage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this NodePage. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this NodePage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this NodePage. - :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def nodes(self): - """Gets the nodes of this NodePage. - - An array of nodes. # noqa: E501 - - :return: The nodes of this NodePage. - :rtype: List[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this NodePage. - - An array of nodes. # noqa: E501 - - :param nodes: The nodes of this NodePage. 
- :type nodes: List[Node] - """ - if nodes is None: - raise ValueError( - "Invalid value for `nodes`, must not be `None`" - ) # noqa: E501 - - self._nodes = nodes diff --git a/apps/schematic/api/schematic_api/models/node_page_all_of.py b/apps/schematic/api/schematic_api/models/node_page_all_of.py deleted file mode 100644 index 049864ff5..000000000 --- a/apps/schematic/api/schematic_api/models/node_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.node import Node -from schematic_api import util - -from schematic_api.models.node import Node # noqa: E501 - - -class NodePageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, nodes=None): # noqa: E501 - """NodePageAllOf - a model defined in OpenAPI - - :param nodes: The nodes of this NodePageAllOf. # noqa: E501 - :type nodes: List[Node] - """ - self.openapi_types = {"nodes": List[Node]} - - self.attribute_map = {"nodes": "nodes"} - - self._nodes = nodes - - @classmethod - def from_dict(cls, dikt) -> "NodePageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The NodePage_allOf of this NodePageAllOf. # noqa: E501 - :rtype: NodePageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def nodes(self): - """Gets the nodes of this NodePageAllOf. - - An array of nodes. # noqa: E501 - - :return: The nodes of this NodePageAllOf. - :rtype: List[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this NodePageAllOf. - - An array of nodes. # noqa: E501 - - :param nodes: The nodes of this NodePageAllOf. - :type nodes: List[Node] - """ - if nodes is None: - raise ValueError( - "Invalid value for `nodes`, must not be `None`" - ) # noqa: E501 - - self._nodes = nodes diff --git a/apps/schematic/api/schematic_api/models/node_property_array.py b/apps/schematic/api/schematic_api/models/node_property_array.py deleted file mode 100644 index bbf3a41cb..000000000 --- a/apps/schematic/api/schematic_api/models/node_property_array.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class NodePropertyArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, node_properties=None): # noqa: E501 - """NodePropertyArray - a model defined in OpenAPI - - :param node_properties: The node_properties of this NodePropertyArray. # noqa: E501 - :type node_properties: List[str] - """ - self.openapi_types = {"node_properties": List[str]} - - self.attribute_map = {"node_properties": "node_properties"} - - self._node_properties = node_properties - - @classmethod - def from_dict(cls, dikt) -> "NodePropertyArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The NodePropertyArray of this NodePropertyArray. 
# noqa: E501 - :rtype: NodePropertyArray - """ - return util.deserialize_model(dikt, cls) - - @property - def node_properties(self): - """Gets the node_properties of this NodePropertyArray. - - An array of node properties. # noqa: E501 - - :return: The node_properties of this NodePropertyArray. - :rtype: List[str] - """ - return self._node_properties - - @node_properties.setter - def node_properties(self, node_properties): - """Sets the node_properties of this NodePropertyArray. - - An array of node properties. # noqa: E501 - - :param node_properties: The node_properties of this NodePropertyArray. - :type node_properties: List[str] - """ - - self._node_properties = node_properties diff --git a/apps/schematic/api/schematic_api/models/page_metadata.py b/apps/schematic/api/schematic_api/models/page_metadata.py deleted file mode 100644 index ab4280355..000000000 --- a/apps/schematic/api/schematic_api/models/page_metadata.py +++ /dev/null @@ -1,238 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class PageMetadata(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - ): # noqa: E501 - """PageMetadata - a model defined in OpenAPI - - :param number: The number of this PageMetadata. # noqa: E501 - :type number: int - :param size: The size of this PageMetadata. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this PageMetadata. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this PageMetadata. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this PageMetadata. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this PageMetadata. # noqa: E501 - :type has_previous: bool - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - - @classmethod - def from_dict(cls, dikt) -> "PageMetadata": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The PageMetadata of this PageMetadata. # noqa: E501 - :rtype: PageMetadata - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this PageMetadata. - - The page number. # noqa: E501 - - :return: The number of this PageMetadata. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this PageMetadata. - - The page number. # noqa: E501 - - :param number: The number of this PageMetadata. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this PageMetadata. 
- - The number of items in a single page. # noqa: E501 - - :return: The size of this PageMetadata. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this PageMetadata. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this PageMetadata. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this PageMetadata. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this PageMetadata. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this PageMetadata. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this PageMetadata. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this PageMetadata. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this PageMetadata. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this PageMetadata. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this PageMetadata. - :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this PageMetadata. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this PageMetadata. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this PageMetadata. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this PageMetadata. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this PageMetadata. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this PageMetadata. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this PageMetadata. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this PageMetadata. 
- :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous diff --git a/apps/schematic/api/schematic_api/models/project_metadata.py b/apps/schematic/api/schematic_api/models/project_metadata.py deleted file mode 100644 index a85746a44..000000000 --- a/apps/schematic/api/schematic_api/models/project_metadata.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ProjectMetadata(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name=None, id=None): # noqa: E501 - """ProjectMetadata - a model defined in OpenAPI - - :param name: The name of this ProjectMetadata. # noqa: E501 - :type name: str - :param id: The id of this ProjectMetadata. # noqa: E501 - :type id: str - """ - self.openapi_types = {"name": str, "id": str} - - self.attribute_map = {"name": "name", "id": "id"} - - self._name = name - self._id = id - - @classmethod - def from_dict(cls, dikt) -> "ProjectMetadata": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ProjectMetadata of this ProjectMetadata. # noqa: E501 - :rtype: ProjectMetadata - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this ProjectMetadata. - - The name of the project. # noqa: E501 - - :return: The name of this ProjectMetadata. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this ProjectMetadata. - - The name of the project. # noqa: E501 - - :param name: The name of this ProjectMetadata. - :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name - - @property - def id(self): - """Gets the id of this ProjectMetadata. - - The ID of the project. # noqa: E501 - - :return: The id of this ProjectMetadata. - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ProjectMetadata. - - The ID of the project. # noqa: E501 - - :param id: The id of this ProjectMetadata. - :type id: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id diff --git a/apps/schematic/api/schematic_api/models/project_metadata_array.py b/apps/schematic/api/schematic_api/models/project_metadata_array.py deleted file mode 100644 index ca1dc537b..000000000 --- a/apps/schematic/api/schematic_api/models/project_metadata_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api import util - -from schematic_api.models.project_metadata import ProjectMetadata # noqa: E501 - - -class ProjectMetadataArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- """ - - def __init__(self, projects=None): # noqa: E501 - """ProjectMetadataArray - a model defined in OpenAPI - - :param projects: The projects of this ProjectMetadataArray. # noqa: E501 - :type projects: List[ProjectMetadata] - """ - self.openapi_types = {"projects": List[ProjectMetadata]} - - self.attribute_map = {"projects": "projects"} - - self._projects = projects - - @classmethod - def from_dict(cls, dikt) -> "ProjectMetadataArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ProjectMetadataArray of this ProjectMetadataArray. # noqa: E501 - :rtype: ProjectMetadataArray - """ - return util.deserialize_model(dikt, cls) - - @property - def projects(self): - """Gets the projects of this ProjectMetadataArray. - - An array of project metadata. # noqa: E501 - - :return: The projects of this ProjectMetadataArray. - :rtype: List[ProjectMetadata] - """ - return self._projects - - @projects.setter - def projects(self, projects): - """Sets the projects of this ProjectMetadataArray. - - An array of project metadata. # noqa: E501 - - :param projects: The projects of this ProjectMetadataArray. - :type projects: List[ProjectMetadata] - """ - - self._projects = projects diff --git a/apps/schematic/api/schematic_api/models/project_metadata_page.py b/apps/schematic/api/schematic_api/models/project_metadata_page.py deleted file mode 100644 index 0b31dc719..000000000 --- a/apps/schematic/api/schematic_api/models/project_metadata_page.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api import util - -from schematic_api.models.project_metadata import ProjectMetadata # noqa: E501 - - -class ProjectMetadataPage(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - number=None, - size=None, - total_elements=None, - total_pages=None, - has_next=None, - has_previous=None, - projects=None, - ): # noqa: E501 - """ProjectMetadataPage - a model defined in OpenAPI - - :param number: The number of this ProjectMetadataPage. # noqa: E501 - :type number: int - :param size: The size of this ProjectMetadataPage. # noqa: E501 - :type size: int - :param total_elements: The total_elements of this ProjectMetadataPage. # noqa: E501 - :type total_elements: int - :param total_pages: The total_pages of this ProjectMetadataPage. # noqa: E501 - :type total_pages: int - :param has_next: The has_next of this ProjectMetadataPage. # noqa: E501 - :type has_next: bool - :param has_previous: The has_previous of this ProjectMetadataPage. # noqa: E501 - :type has_previous: bool - :param projects: The projects of this ProjectMetadataPage. 
# noqa: E501 - :type projects: List[ProjectMetadata] - """ - self.openapi_types = { - "number": int, - "size": int, - "total_elements": int, - "total_pages": int, - "has_next": bool, - "has_previous": bool, - "projects": List[ProjectMetadata], - } - - self.attribute_map = { - "number": "number", - "size": "size", - "total_elements": "totalElements", - "total_pages": "totalPages", - "has_next": "hasNext", - "has_previous": "hasPrevious", - "projects": "projects", - } - - self._number = number - self._size = size - self._total_elements = total_elements - self._total_pages = total_pages - self._has_next = has_next - self._has_previous = has_previous - self._projects = projects - - @classmethod - def from_dict(cls, dikt) -> "ProjectMetadataPage": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ProjectMetadataPage of this ProjectMetadataPage. # noqa: E501 - :rtype: ProjectMetadataPage - """ - return util.deserialize_model(dikt, cls) - - @property - def number(self): - """Gets the number of this ProjectMetadataPage. - - The page number. # noqa: E501 - - :return: The number of this ProjectMetadataPage. - :rtype: int - """ - return self._number - - @number.setter - def number(self, number): - """Sets the number of this ProjectMetadataPage. - - The page number. # noqa: E501 - - :param number: The number of this ProjectMetadataPage. - :type number: int - """ - if number is None: - raise ValueError( - "Invalid value for `number`, must not be `None`" - ) # noqa: E501 - - self._number = number - - @property - def size(self): - """Gets the size of this ProjectMetadataPage. - - The number of items in a single page. # noqa: E501 - - :return: The size of this ProjectMetadataPage. - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this ProjectMetadataPage. - - The number of items in a single page. # noqa: E501 - - :param size: The size of this ProjectMetadataPage. - :type size: int - """ - if size is None: - raise ValueError( - "Invalid value for `size`, must not be `None`" - ) # noqa: E501 - - self._size = size - - @property - def total_elements(self): - """Gets the total_elements of this ProjectMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :return: The total_elements of this ProjectMetadataPage. - :rtype: int - """ - return self._total_elements - - @total_elements.setter - def total_elements(self, total_elements): - """Sets the total_elements of this ProjectMetadataPage. - - Total number of elements in the result set. # noqa: E501 - - :param total_elements: The total_elements of this ProjectMetadataPage. - :type total_elements: int - """ - if total_elements is None: - raise ValueError( - "Invalid value for `total_elements`, must not be `None`" - ) # noqa: E501 - - self._total_elements = total_elements - - @property - def total_pages(self): - """Gets the total_pages of this ProjectMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :return: The total_pages of this ProjectMetadataPage. - :rtype: int - """ - return self._total_pages - - @total_pages.setter - def total_pages(self, total_pages): - """Sets the total_pages of this ProjectMetadataPage. - - Total number of pages in the result set. # noqa: E501 - - :param total_pages: The total_pages of this ProjectMetadataPage. 
- :type total_pages: int - """ - if total_pages is None: - raise ValueError( - "Invalid value for `total_pages`, must not be `None`" - ) # noqa: E501 - - self._total_pages = total_pages - - @property - def has_next(self): - """Gets the has_next of this ProjectMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :return: The has_next of this ProjectMetadataPage. - :rtype: bool - """ - return self._has_next - - @has_next.setter - def has_next(self, has_next): - """Sets the has_next of this ProjectMetadataPage. - - Returns if there is a next page. # noqa: E501 - - :param has_next: The has_next of this ProjectMetadataPage. - :type has_next: bool - """ - if has_next is None: - raise ValueError( - "Invalid value for `has_next`, must not be `None`" - ) # noqa: E501 - - self._has_next = has_next - - @property - def has_previous(self): - """Gets the has_previous of this ProjectMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :return: The has_previous of this ProjectMetadataPage. - :rtype: bool - """ - return self._has_previous - - @has_previous.setter - def has_previous(self, has_previous): - """Sets the has_previous of this ProjectMetadataPage. - - Returns if there is a previous page. # noqa: E501 - - :param has_previous: The has_previous of this ProjectMetadataPage. - :type has_previous: bool - """ - if has_previous is None: - raise ValueError( - "Invalid value for `has_previous`, must not be `None`" - ) # noqa: E501 - - self._has_previous = has_previous - - @property - def projects(self): - """Gets the projects of this ProjectMetadataPage. - - An array of project metadata. # noqa: E501 - - :return: The projects of this ProjectMetadataPage. - :rtype: List[ProjectMetadata] - """ - return self._projects - - @projects.setter - def projects(self, projects): - """Sets the projects of this ProjectMetadataPage. - - An array of project metadata. # noqa: E501 - - :param projects: The projects of this ProjectMetadataPage. - :type projects: List[ProjectMetadata] - """ - if projects is None: - raise ValueError( - "Invalid value for `projects`, must not be `None`" - ) # noqa: E501 - - self._projects = projects diff --git a/apps/schematic/api/schematic_api/models/project_metadata_page_all_of.py b/apps/schematic/api/schematic_api/models/project_metadata_page_all_of.py deleted file mode 100644 index 597d536a4..000000000 --- a/apps/schematic/api/schematic_api/models/project_metadata_page_all_of.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api import util - -from schematic_api.models.project_metadata import ProjectMetadata # noqa: E501 - - -class ProjectMetadataPageAllOf(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, projects=None): # noqa: E501 - """ProjectMetadataPageAllOf - a model defined in OpenAPI - - :param projects: The projects of this ProjectMetadataPageAllOf. 
# noqa: E501 - :type projects: List[ProjectMetadata] - """ - self.openapi_types = {"projects": List[ProjectMetadata]} - - self.attribute_map = {"projects": "projects"} - - self._projects = projects - - @classmethod - def from_dict(cls, dikt) -> "ProjectMetadataPageAllOf": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ProjectMetadataPage_allOf of this ProjectMetadataPageAllOf. # noqa: E501 - :rtype: ProjectMetadataPageAllOf - """ - return util.deserialize_model(dikt, cls) - - @property - def projects(self): - """Gets the projects of this ProjectMetadataPageAllOf. - - An array of project metadata. # noqa: E501 - - :return: The projects of this ProjectMetadataPageAllOf. - :rtype: List[ProjectMetadata] - """ - return self._projects - - @projects.setter - def projects(self, projects): - """Sets the projects of this ProjectMetadataPageAllOf. - - An array of project metadata. # noqa: E501 - - :param projects: The projects of this ProjectMetadataPageAllOf. - :type projects: List[ProjectMetadata] - """ - if projects is None: - raise ValueError( - "Invalid value for `projects`, must not be `None`" - ) # noqa: E501 - - self._projects = projects diff --git a/apps/schematic/api/schematic_api/models/validation_rule.py b/apps/schematic/api/schematic_api/models/validation_rule.py deleted file mode 100644 index 6c9aabbc4..000000000 --- a/apps/schematic/api/schematic_api/models/validation_rule.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api import util - - -class ValidationRule(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name=None): # noqa: E501 - """ValidationRule - a model defined in OpenAPI - - :param name: The name of this ValidationRule. # noqa: E501 - :type name: str - """ - self.openapi_types = {"name": str} - - self.attribute_map = {"name": "name"} - - self._name = name - - @classmethod - def from_dict(cls, dikt) -> "ValidationRule": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ValidationRule of this ValidationRule. # noqa: E501 - :rtype: ValidationRule - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this ValidationRule. - - The name of the rule, along with the arguments for the given rule. # noqa: E501 - - :return: The name of this ValidationRule. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this ValidationRule. - - The name of the rule, along with the arguments for the given rule. # noqa: E501 - - :param name: The name of this ValidationRule. 
- :type name: str - """ - if name is None: - raise ValueError( - "Invalid value for `name`, must not be `None`" - ) # noqa: E501 - - self._name = name diff --git a/apps/schematic/api/schematic_api/models/validation_rule_array.py b/apps/schematic/api/schematic_api/models/validation_rule_array.py deleted file mode 100644 index af5f9fb3e..000000000 --- a/apps/schematic/api/schematic_api/models/validation_rule_array.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict # noqa: F401 - -from schematic_api.models.base_model_ import Model -from schematic_api.models.validation_rule import ValidationRule -from schematic_api import util - -from schematic_api.models.validation_rule import ValidationRule # noqa: E501 - - -class ValidationRuleArray(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, validation_rules=None): # noqa: E501 - """ValidationRuleArray - a model defined in OpenAPI - - :param validation_rules: The validation_rules of this ValidationRuleArray. # noqa: E501 - :type validation_rules: List[ValidationRule] - """ - self.openapi_types = {"validation_rules": List[ValidationRule]} - - self.attribute_map = {"validation_rules": "validation_rules"} - - self._validation_rules = validation_rules - - @classmethod - def from_dict(cls, dikt) -> "ValidationRuleArray": - """Returns the dict as a model - - :param dikt: A dict. - :type: dict - :return: The ValidationRuleArray of this ValidationRuleArray. # noqa: E501 - :rtype: ValidationRuleArray - """ - return util.deserialize_model(dikt, cls) - - @property - def validation_rules(self): - """Gets the validation_rules of this ValidationRuleArray. - - An array of validation rules. # noqa: E501 - - :return: The validation_rules of this ValidationRuleArray. - :rtype: List[ValidationRule] - """ - return self._validation_rules - - @validation_rules.setter - def validation_rules(self, validation_rules): - """Sets the validation_rules of this ValidationRuleArray. - - An array of validation rules. # noqa: E501 - - :param validation_rules: The validation_rules of this ValidationRuleArray. - :type validation_rules: List[ValidationRule] - """ - - self._validation_rules = validation_rules diff --git a/apps/schematic/api/schematic_api/mypy.ini b/apps/schematic/api/schematic_api/mypy.ini deleted file mode 100644 index 1215375ed..000000000 --- a/apps/schematic/api/schematic_api/mypy.ini +++ /dev/null @@ -1,2 +0,0 @@ -[mypy] -ignore_missing_imports = True \ No newline at end of file diff --git a/apps/schematic/api/schematic_api/openapi/openapi.yaml b/apps/schematic/api/schematic_api/openapi/openapi.yaml deleted file mode 100644 index 37f464518..000000000 --- a/apps/schematic/api/schematic_api/openapi/openapi.yaml +++ /dev/null @@ -1,3753 +0,0 @@ -openapi: 3.0.3 -info: - contact: - name: Support - url: https://github.com/Sage-Bionetworks/sage-monorepo - license: - name: Apache 2.0 - url: https://github.com/Sage-Bionetworks/sage-monorepo - title: Schematic REST API - version: 0.1.0 - x-logo: - url: https://Sage-Bionetworks.github.io/rocc-schemas/logo.png -servers: - - url: http://localhost/api/v1 -tags: - - description: Operations about storages. 
- name: Storage -paths: - /assetTypes/{assetType}/assetViews/{assetViewId}/csv: - get: - description: Gets the asset view table in csv file form - operationId: get_asset_view_csv - parameters: - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: false - in: path - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - responses: - '200': - content: - text/csv: - schema: - type: string - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the asset view table in csv file form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/assetViews/{assetViewId}/json: - get: - description: Gets the asset view table in json form - operationId: get_asset_view_json - parameters: - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: false - in: path - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/AssetViewJson' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the asset view table in json form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataArray: - get: - description: Gets all storage projects the current user has access to. 
- operationId: get_project_metadata_array - parameters: - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: false - in: path - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ProjectMetadataArray' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all storage projects the current user has access to. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataPage: - get: - description: Gets all storage projects the current user has access to. - operationId: get_project_metadata_page - parameters: - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: false - in: path - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ProjectMetadataPage' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all storage projects the current user has access to. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataArray: - get: - description: Gets all files associated with a dataset. - operationId: get_dataset_file_metadata_array - parameters: - - description: The ID of a dataset. - explode: false - in: path - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: A list of file names used to filter the output. - explode: true - in: query - name: fileNames - required: false - schema: - $ref: '#/components/schemas/FileNames' - style: form - - description: "Whether or not to return the full path of output, or just the\ - \ basename." - explode: true - in: query - name: useFullFilePath - required: false - schema: - $ref: '#/components/schemas/UseFullFilePath' - style: form - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/FileMetadataArray' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all files associated with a dataset. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataPage: - get: - description: Gets all files associated with a dataset. - operationId: get_dataset_file_metadata_page - parameters: - - description: The ID of a dataset. - explode: false - in: path - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: A list of file names used to filter the output. - explode: true - in: query - name: fileNames - required: false - schema: - $ref: '#/components/schemas/FileNames' - style: form - - description: "Whether or not to return the full path of output, or just the\ - \ basename." - explode: true - in: query - name: useFullFilePath - required: false - schema: - $ref: '#/components/schemas/UseFullFilePath' - style: form - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/FileMetadataPage' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all files associated with a dataset. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/datasets/{datasetId}/manifestCsv: - get: - description: Gets the manifest in csv form - operationId: get_dataset_manifest_csv - parameters: - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: The ID of a dataset. - explode: false - in: path - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: simple - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - text/csv: - schema: - type: string - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the manifest in csv form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/datasets/{datasetId}/manifestJson: - get: - description: Gets the manifest in json form - operationId: get_dataset_manifest_json - parameters: - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: The ID of a dataset. - explode: false - in: path - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: simple - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestJson' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the manifest in json form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/manifests/{manifestId}/csv: - get: - description: Gets the manifest in csv form - operationId: get_manifest_csv - parameters: - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of a manifest - explode: false - in: path - name: manifestId - required: true - schema: - $ref: '#/components/schemas/ManifestId' - style: simple - responses: - '200': - content: - text/csv: - schema: - type: string - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the manifest in csv form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/manifests/{manifestId}/json: - get: - description: Gets the manifest in json form - operationId: get_manifest_json - parameters: - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of a manifest - explode: false - in: path - name: manifestId - required: true - schema: - $ref: '#/components/schemas/ManifestId' - style: simple - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestJson' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - 
schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets the manifest in json form - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataArray: - get: - description: Gets all dataset meatdata in folder under a given storage project - that the current user has access to. - operationId: get_project_dataset_metadata_array - parameters: - - description: The Synapse ID of a storage project. - explode: false - in: path - name: projectId - required: true - schema: - $ref: '#/components/schemas/ProjectId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/DatasetMetadataArray' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all dataset metadata in folder under a given storage project that - the current user has access to. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataPage: - get: - description: Gets a page of dataset meatdata in folder under a given storage - project that the current user has access to. - operationId: get_project_dataset_metadata_page - parameters: - - description: The Synapse ID of a storage project. - explode: false - in: path - name: projectId - required: true - schema: - $ref: '#/components/schemas/ProjectId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/DatasetMetadataPage' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets a page of dataset metadata in folder under a given storage project - that the current user has access to. - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataArray: - get: - description: Gets all manifests in a project folder that the current user has - access to. - operationId: get_project_manifest_metadata_array - parameters: - - description: The Synapse ID of a storage project. - explode: false - in: path - name: projectId - required: true - schema: - $ref: '#/components/schemas/ProjectId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestMetadataArray' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all manifests in a project folder that users have access to - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataPage: - get: - description: Gets all manifests in a project folder that the current user has - access to. - operationId: get_project_manifest_metadata_page - parameters: - - description: The Synapse ID of a storage project. - explode: false - in: path - name: projectId - required: true - schema: - $ref: '#/components/schemas/ProjectId' - style: simple - - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestMetadataPage' - description: Success - '400': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - '401': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '403': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - '404': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Gets all manifests in a project folder that users have access to - tags: - - Storage - x-openapi-router-controller: schematic_api.controllers.storage_controller - /components/{componentLabel}/: - get: - description: Get all the attributes associated with a specific data model component - formatted as a dataframe (stored as a JSON String). - operationId: get_component - parameters: - - description: The label of a component in a schema - explode: false - in: path - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: Whether to include the indexes of the dataframe in the returned - JSON string. - explode: true - in: query - name: includeIndex - required: false - schema: - default: false - type: boolean - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - description: The component as a json string - type: string - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Get all the attributes associated with a specific data model component - formatted as a dataframe (stored as a JSON String). 
- tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /components/{componentLabel}/requirementsArray: - get: - description: "Given a source model component (see https://w3id.org/biolink/vocab/category\ - \ for definnition of component), return all components required by it in array\ - \ form." - operationId: get_component_requirements_array - parameters: - - description: The label of a component in a schema - explode: false - in: path - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ComponentRequirementArray' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: "Given a source model component (see https://w3id.org/biolink/vocab/category\ - \ for definnition of component), return all components required by it in array\ - \ form." - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /components/{componentLabel}/requirementsGraph: - get: - description: "Given a source model component (see https://w3id.org/biolink/vocab/category\ - \ for definnition of component), return all components required by it in graph\ - \ form." - operationId: get_component_requirements_graph - parameters: - - description: The label of a component in a schema - explode: false - in: path - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ComponentRequirementGraph' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: "Given a source model component (see https://w3id.org/biolink/vocab/category\ - \ for definnition of component), return all components required by it in graph\ - \ form." 
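A similar sketch for the requirements endpoints just described, again with a placeholder base URL; it lists the components required by the example Patient component via requirementsArray.

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

# GET /components/{componentLabel}/requirementsArray
resp = requests.get(
    f"{BASE_URL}/components/Patient/requirementsArray",
    params={"schemaUrl": SCHEMA_URL, "displayLabelType": "class_label"},
    timeout=30,
)
resp.raise_for_status()

# ComponentRequirementArray -> {"componentRequirementsList": ["...", ...]}
for required in resp.json()["componentRequirementsList"]:
    print(required)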
- tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /connectedNodePairArray: - get: - description: Gets a array of connected node pairs - operationId: get_connected_node_pair_array - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'Type of relationship in a schema, such as requiresDependency' - explode: true - in: query - name: relationshipType - required: true - schema: - $ref: '#/components/schemas/RelationshipType' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectedNodePairArray' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets an array of connected node pairs - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /connectedNodePairPage: - get: - description: Gets a page of connected node pairs - operationId: get_connected_node_pair_page - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'Type of relationship in a schema, such as requiresDependency' - explode: true - in: query - name: relationshipType - required: true - schema: - $ref: '#/components/schemas/RelationshipType' - style: form - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectedNodePairPage' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets a page of connected node pairs - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /generateExcelManifest: - get: - description: Generates an excel file - operationId: generate_excel_manifest - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'If true, annotations are added to the manifest' - explode: true - in: query - name: addAnnotations - required: false - schema: - default: false - type: boolean - style: form - - description: "If making one manifest, the 
title of the manifest. If making\ - \ multiple manifests, the prefix of the title of the manifests." - explode: true - in: query - name: manifestTitle - required: false - schema: - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: The ID of a dataset. - explode: true - in: query - name: datasetId - required: false - schema: - $ref: '#/components/schemas/DatasetId' - style: form - - description: A data type - explode: true - in: query - name: dataType - required: true - schema: - $ref: '#/components/schemas/DataType' - style: form - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: false - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - responses: - '200': - content: - application/vnd.ms-excel: - schema: - format: binary - type: string - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Generates an excel file - tags: - - ManifestGeneration - x-openapi-router-controller: schematic_api.controllers.manifest_generation_controller - /generateGoogleSheetManifests: - get: - description: Generates a list of google sheet links - operationId: generate_google_sheet_manifests - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'If true, annotations are added to the manifest' - explode: true - in: query - name: addAnnotations - required: false - schema: - default: false - type: boolean - style: form - - description: "If making one manifest, the title of the manifest. If making\ - \ multiple manifests, the prefix of the title of the manifests." - explode: true - in: query - name: manifestTitle - required: false - schema: - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: "If true, users are blocked from entering incorrect values. If\ - \ false, users will get a warning when using incorrect values." - explode: true - in: query - name: useStrictValidation - required: false - schema: - default: true - type: boolean - style: form - - description: An array of dataset ids - explode: true - in: query - name: datasetIdArray - required: false - schema: - $ref: '#/components/schemas/DatasetIdArray' - style: form - - description: An array of data types - explode: true - in: query - name: dataTypeArray - required: false - schema: - $ref: '#/components/schemas/DataTypeArray' - style: form - - description: ID of view listing all project data assets. E.g. 
for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: false - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: "If true, a manifest for all components will be generated, datasetIds\ - \ will be ignored. If false, manifests for each id in datasetIds will be\ - \ generated." - explode: true - in: query - name: generateAllManifests - required: false - schema: - default: false - type: boolean - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/GoogleSheetLinks' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Generates a list of google sheet links - tags: - - ManifestGeneration - x-openapi-router-controller: schematic_api.controllers.manifest_generation_controller - /nodes/{nodeDisplay}/isRequired: - get: - description: Gets whether or not the node is required in the schema - operationId: get_node_is_required - parameters: - - description: The display name of the node in a schema - explode: false - in: path - name: nodeDisplay - required: true - schema: - $ref: '#/components/schemas/NodeDisplay' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - description: Whether or not the node is required in the schema - type: boolean - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets whether or not the node is required in the schema - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /nodes/{nodeDisplay}/propertyLabel: - get: - description: Gets the property label of the node - operationId: get_property_label - parameters: - - description: The display name of the node in a schema - explode: false - in: path - name: nodeDisplay - required: true - schema: - $ref: '#/components/schemas/NodeDisplay' - style: simple - - description: Whether or not to use the more strict way of converting to camel - case - explode: true - in: query - name: useStrictCamelCase - required: false - schema: - $ref: '#/components/schemas/UseStrictCamelCase' - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/PropertyLabel' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets the property label of the node - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /nodes/{nodeDisplay}/validationRules: - get: - description: "Gets the validation rules, along with the arguments for each given\ - \ rule associated with a given node" - operationId: 
get_node_validation_rules - parameters: - - description: The display name of the node in a schema - explode: false - in: path - name: nodeDisplay - required: true - schema: - $ref: '#/components/schemas/NodeDisplay' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationRuleArray' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: "Gets the validation rules, along with the arguments for each given\ - \ rule associated with a given node" - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /nodes/{nodeLabel}/dependencyArray: - get: - description: Gets the immediate dependencies that are related to the given source - node - operationId: get_node_dependency_array - parameters: - - description: The label of the source node in a schema to get the dependencies - of - explode: false - in: path - name: nodeLabel - required: true - schema: - $ref: '#/components/schemas/NodeLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: "Whether or not to return the display names of the component,\ - \ otherwise the label" - explode: true - in: query - name: returnDisplayNames - required: false - schema: - $ref: '#/components/schemas/ReturnDisplayNames' - style: form - - description: "Whether or not to order the components by their order in the\ - \ schema, otherwise random" - explode: true - in: query - name: returnOrderedBySchema - required: false - schema: - $ref: '#/components/schemas/ReturnOrderedBySchema' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/NodeArray' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets the immediate dependencies that are related to the given source - node - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /nodes/{nodeLabel}/dependencyPage: - get: - description: Gets the immediate dependencies that are related to the given source - node - operationId: get_node_dependency_page - parameters: - - description: The label of the source node in a schema to get the dependencies - of - explode: false - in: path - name: nodeLabel - required: true - schema: - $ref: '#/components/schemas/NodeLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: 
'#/components/schemas/SchemaUrl' - style: form - - description: "Whether or not to return the display names of the component,\ - \ otherwise the label" - explode: true - in: query - name: returnDisplayNames - required: false - schema: - $ref: '#/components/schemas/ReturnDisplayNames' - style: form - - description: "Whether or not to order the components by their order in the\ - \ schema, otherwise random" - explode: true - in: query - name: returnOrderedBySchema - required: false - schema: - $ref: '#/components/schemas/ReturnOrderedBySchema' - style: form - - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/NodePage' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets the immediate dependencies that are related to the given source - node - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /nodes/{nodeLabel}/nodeProperties: - get: - description: Gets properties associated with a given node - operationId: get_node_properties - parameters: - - description: The label of the source node in a schema to get the dependencies - of - explode: false - in: path - name: nodeLabel - required: true - schema: - $ref: '#/components/schemas/NodeLabel' - style: simple - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/NodePropertyArray' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets properties associated with a given node - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /schemaAttributes: - get: - description: Get all the attributes associated with a data model formatted as - a dataframe (stored as a JSON String). 
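The paginated endpoints in this spec (dependencyPage above, and the manifest, file, and project pages elsewhere) share the pageNumber and pageMaxItems parameters and the PageMetadata fields in their responses. A sketch of walking every page of node dependencies, assuming the placeholder base URL used in the earlier sketches:

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

dependencies = []
page_number = 1
while True:
    resp = requests.get(
        f"{BASE_URL}/nodes/Patient/dependencyPage",
        params={
            "schemaUrl": SCHEMA_URL,
            "pageNumber": page_number,  # defaults to 1
            "pageMaxItems": 100,        # up to 100,000 items per page
        },
        timeout=30,
    )
    resp.raise_for_status()
    page = resp.json()  # NodePage: PageMetadata fields plus "nodes"
    dependencies.extend(node["name"] for node in page["nodes"])
    if not page["hasNext"]:
        break
    page_number += 1

print(dependencies)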
- operationId: get_schema_attributes - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - description: The schema as a json string - type: string - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Get all the attributes associated with a data model formatted as a - dataframe (stored as a JSON String). - tags: - - Schema - x-openapi-router-controller: schematic_api.controllers.schema_controller - /schematicVersion: - get: - description: Gets the version of the schematic library currently used by the - API - operationId: get_schematic_version - responses: - '200': - content: - application/json: - schema: - example: v21.1.1 - type: string - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Gets the version of the schematic library currently used by the API - tags: - - Versions - x-openapi-router-controller: schematic_api.controllers.versions_controller - /submitManifestCsv: - post: - description: 'Validates manifest in csv form, then submits it' - operationId: submit_manifest_csv - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'A component in a schema, either the dsplay label or schema label' - explode: true - in: query - name: component - required: true - schema: - type: string - style: form - - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all\ - \ rules will be available." - explode: true - in: query - name: restrictRules - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - style: form - - description: The ID of a dataset. - explode: true - in: query - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: form - - description: file_and_entities will store the manifest as a csv and create - Synapse files for each row in the manifest. table_and_file will store the - manifest as a table and a csv on Synapse. file_only will store the manifest - as a csv only on Synapse. table_file_and_entities will perform the options - file_with_entites and table in combination. - explode: true - in: query - name: storageMethod - required: false - schema: - default: table_file_and_entities - enum: - - file_and_entities - - table_and_file - - file_only - - table_file_and_entities - type: string - style: form - - description: "If true, annotations with blank values will be hidden from a\ - \ dataset's annotation list in Synaspe. If false, annotations with blank\ - \ values will be displayed." 
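A hedged sketch of submitting a manifest CSV through this endpoint. The base URL, bearer token, and CSV file name are placeholders; the Synapse IDs and the Patient component come from the examples in this spec, and storageMethod and tableManipulationMethod are spelled out rather than left to their defaults.

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL
TOKEN = "<synapse-access-token>"           # placeholder bearer token
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

with open("patient_manifest.csv", "rb") as csv_file:  # placeholder file name
    resp = requests.post(
        f"{BASE_URL}/submitManifestCsv",
        params={
            "schemaUrl": SCHEMA_URL,
            "component": "Patient",
            "datasetId": "syn23643250",
            "assetViewId": "syn23643253",
            "storageMethod": "table_file_and_entities",
            "tableManipulationMethod": "replace",
        },
        headers={
            "Authorization": f"Bearer {TOKEN}",
            "Content-Type": "application/csv",
        },
        data=csv_file,  # the request body is the raw .csv file
        timeout=300,
    )
resp.raise_for_status()
print("Stored manifest:", resp.json())  # ManifestId, e.g. "syn51078535"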
- explode: true - in: query - name: hideBlanks - required: false - schema: - default: false - type: boolean - style: form - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: "replace will remove the rows and columns from the existing table\ - \ and store the new rows and columns, preserving the name and synID. upsert\ - \ will add the new rows to the table and preserve the exisitng rows and\ - \ columns in the existing table." - explode: true - in: query - name: tableManipulationMethod - required: false - schema: - default: replace - enum: - - replace - - upsert - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: The labeling style for annotation keys. - explode: true - in: query - name: annotationKeyStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: The labeling syle for table column names. - explode: true - in: query - name: tableColumnNameStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - - display_name - type: string - style: form - requestBody: - content: - application/csv: - schema: - format: binary - type: string - description: .csv file - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestId' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: 'Validates manifest in csv form, then submits it' - tags: - - ManifestValidation - x-openapi-router-controller: schematic_api.controllers.manifest_validation_controller - /submitManifestJson: - post: - description: 'Validates a manifest in json form, then submits it in csv form' - operationId: submit_manifest_json - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: 'A component in a schema, either the dsplay label or schema label' - explode: true - in: query - name: component - required: true - schema: - type: string - style: form - - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all\ - \ rules will be available." - explode: true - in: query - name: restrictRules - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - style: form - - description: The ID of a dataset. - explode: true - in: query - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: form - - description: file_and_entities will store the manifest as a csv and create - Synapse files for each row in the manifest. table_and_file will store the - manifest as a table and a csv on Synapse. file_only will store the manifest - as a csv only on Synapse. 
table_file_and_entities will perform the options - file_with_entites and table in combination. - explode: true - in: query - name: storageMethod - required: false - schema: - default: table_file_and_entities - enum: - - file_and_entities - - table_and_file - - file_only - - table_file_and_entities - type: string - style: form - - description: "If true, annotations with blank values will be hidden from a\ - \ dataset's annotation list in Synaspe. If false, annotations with blank\ - \ values will be displayed." - explode: true - in: query - name: hideBlanks - required: false - schema: - default: false - type: boolean - style: form - - description: ID of view listing all project data assets. E.g. for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - - description: "replace will remove the rows and columns from the existing table\ - \ and store the new rows and columns, preserving the name and synID. upsert\ - \ will add the new rows to the table and preserve the exisitng rows and\ - \ columns in the existing table." - explode: true - in: query - name: tableManipulationMethod - required: false - schema: - default: replace - enum: - - replace - - upsert - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: The labeling style for annotation keys. - explode: true - in: query - name: annotationKeyStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - - description: The labeling syle for table column names. - explode: true - in: query - name: tableColumnNameStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - - display_name - type: string - style: form - requestBody: - content: - text/plain: - schema: - type: string - description: A manifest in json form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestId' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: 'Validates a manifest in json form, then submits it' - tags: - - ManifestValidation - x-openapi-router-controller: schematic_api.controllers.manifest_validation_controller - /tangledTreeLayers: - get: - description: Get tangled tree node layers to display for a given data model - and figure type - operationId: get_tangled_tree_layers - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: Figure type to generate. 
- explode: true - in: query - name: figureType - required: false - schema: - default: component - enum: - - component - - dependency - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TangledTreeLayers' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Get tangled tree node layers to display for a given data model and - figure type - tags: - - TangledTree - x-openapi-router-controller: schematic_api.controllers.tangled_tree_controller - /tangledTreeText: - get: - description: "Get tangled tree plain or highlighted text to display for a given\ - \ data model, text formatting and figure type" - operationId: get_tangled_tree_text - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: Figure type to generate. - explode: true - in: query - name: figureType - required: false - schema: - default: component - enum: - - component - - dependency - type: string - style: form - - description: Text formatting type. - explode: true - in: query - name: textFormat - required: false - schema: - default: plain - enum: - - plain - - highlighted - type: string - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TangledTreeText' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: "Get tangled tree plain or highlighted text to display for a given\ - \ data model, text formatting and figure type" - tags: - - TangledTree - x-openapi-router-controller: schematic_api.controllers.tangled_tree_controller - /validateManifestCsv: - post: - description: Validates a manifest in csv form - operationId: validate_manifest_csv - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The label of a component in a schema - explode: true - in: query - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: form - - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all\ - \ rules will be available." 
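A companion sketch for validation without submission, using the same placeholder base URL, token, and file name as in the submission example; the response is a ManifestValidationResult whose errors and warnings lists are empty when the manifest passes.

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL
TOKEN = "<synapse-access-token>"           # placeholder bearer token
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

with open("patient_manifest.csv", "rb") as csv_file:  # placeholder file name
    # restrictRules defaults to false, so the full Great Expectations suite runs.
    resp = requests.post(
        f"{BASE_URL}/validateManifestCsv",
        params={"schemaUrl": SCHEMA_URL, "componentLabel": "Patient"},
        headers={
            "Authorization": f"Bearer {TOKEN}",
            "Content-Type": "application/csv",
        },
        data=csv_file,
        timeout=300,
    )
resp.raise_for_status()

result = resp.json()  # ManifestValidationResult
if result["errors"]:
    print("Validation errors:", result["errors"])
else:
    print("Manifest is valid; warnings:", result["warnings"])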
- explode: true - in: query - name: restrictRules - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - requestBody: - content: - application/csv: - schema: - format: binary - type: string - description: .csv file - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestValidationResult' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - security: - - bearerAuth: [] - summary: Validates a manifest in csv form - tags: - - ManifestValidation - x-openapi-router-controller: schematic_api.controllers.manifest_validation_controller - /validateManifestJson: - post: - description: Validates a manifest in json form - operationId: validate_manifest_json - parameters: - - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - - description: The label of a component in a schema - explode: true - in: query - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: form - - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all\ - \ rules will be available." - explode: true - in: query - name: restrictRules - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - style: form - - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - requestBody: - content: - text/plain: - schema: - type: string - description: A manifest in json form - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestValidationResult' - description: Success - '500': - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server - error - summary: Validates a manifest in json form - tags: - - ManifestValidation - x-openapi-router-controller: schematic_api.controllers.manifest_validation_controller -components: - parameters: - projectId: - description: The Synapse ID of a storage project. - explode: false - in: path - name: projectId - required: true - schema: - $ref: '#/components/schemas/ProjectId' - style: simple - assetType: - description: 'Type of asset, such as Synapse' - explode: false - in: path - name: assetType - required: true - schema: - $ref: '#/components/schemas/AssetType' - style: simple - assetViewIdQuery: - description: ID of view listing all project data assets. E.g. 
for Synapse this - would be the Synapse ID of the fileview listing all data assets for a given - project - explode: true - in: query - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: form - pageNumber: - description: The page number to get for a paginated query - explode: true - in: query - name: pageNumber - required: false - schema: - default: 1 - minimum: 1 - type: integer - style: form - pageMaxItems: - description: "The maximum number of items per page (up to 100,000) for paginated\ - \ endpoints" - explode: true - in: query - name: pageMaxItems - required: false - schema: - default: 100000 - minimum: 1 - type: integer - style: form - assetViewId: - description: ID of view listing all project data assets. E.g. for Synapse this - would be the Synapse ID of the fileview listing all data assets for a given - project - explode: false - in: path - name: assetViewId - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - style: simple - datasetId: - description: The ID of a dataset. - explode: false - in: path - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: simple - fileNames: - description: A list of file names used to filter the output. - explode: true - in: query - name: fileNames - required: false - schema: - $ref: '#/components/schemas/FileNames' - style: form - useFullFilePath: - description: "Whether or not to return the full path of output, or just the\ - \ basename." - explode: true - in: query - name: useFullFilePath - required: false - schema: - $ref: '#/components/schemas/UseFullFilePath' - style: form - manifestId: - description: ID of a manifest - explode: false - in: path - name: manifestId - required: true - schema: - $ref: '#/components/schemas/ManifestId' - style: simple - nodeLabel: - description: The label of the source node in a schema to get the dependencies - of - explode: false - in: path - name: nodeLabel - required: true - schema: - $ref: '#/components/schemas/NodeLabel' - style: simple - schemaUrl: - description: The URL of a schema in jsonld or csv form - explode: true - in: query - name: schemaUrl - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - style: form - returnDisplayNames: - description: "Whether or not to return the display names of the component, otherwise\ - \ the label" - explode: true - in: query - name: returnDisplayNames - required: false - schema: - $ref: '#/components/schemas/ReturnDisplayNames' - style: form - returnOrderedBySchema: - description: "Whether or not to order the components by their order in the schema,\ - \ otherwise random" - explode: true - in: query - name: returnOrderedBySchema - required: false - schema: - $ref: '#/components/schemas/ReturnOrderedBySchema' - style: form - displayLabelType: - description: The type of label to display - explode: true - in: query - name: displayLabelType - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - nodeDisplay: - description: The display name of the node in a schema - explode: false - in: path - name: nodeDisplay - required: true - schema: - $ref: '#/components/schemas/NodeDisplay' - style: simple - useStrictCamelCase: - description: Whether or not to use the more strict way of converting to camel - case - explode: true - in: query - name: useStrictCamelCase - required: false - schema: - $ref: '#/components/schemas/UseStrictCamelCase' - style: form - componentLabel: - description: The 
label of a component in a schema - explode: false - in: path - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: simple - includeIndex: - description: Whether to include the indexes of the dataframe in the returned - JSON string. - explode: true - in: query - name: includeIndex - required: false - schema: - default: false - type: boolean - style: form - relationshipType: - description: 'Type of relationship in a schema, such as requiresDependency' - explode: true - in: query - name: relationshipType - required: true - schema: - $ref: '#/components/schemas/RelationshipType' - style: form - componentLabelQuery: - description: The label of a component in a schema - explode: true - in: query - name: componentLabel - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - style: form - restrictRules: - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all rules\ - \ will be available." - explode: true - in: query - name: restrictRules - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - style: form - component: - description: 'A component in a schema, either the dsplay label or schema label' - explode: true - in: query - name: component - required: true - schema: - type: string - style: form - datasetIdQuery: - description: The ID of a dataset. - explode: true - in: query - name: datasetId - required: true - schema: - $ref: '#/components/schemas/DatasetId' - style: form - storageMethod: - description: file_and_entities will store the manifest as a csv and create Synapse - files for each row in the manifest. table_and_file will store the manifest - as a table and a csv on Synapse. file_only will store the manifest as a csv - only on Synapse. table_file_and_entities will perform the options file_with_entites - and table in combination. - explode: true - in: query - name: storageMethod - required: false - schema: - default: table_file_and_entities - enum: - - file_and_entities - - table_and_file - - file_only - - table_file_and_entities - type: string - style: form - hideBlanks: - description: "If true, annotations with blank values will be hidden from a dataset's\ - \ annotation list in Synaspe. If false, annotations with blank values will\ - \ be displayed." - explode: true - in: query - name: hideBlanks - required: false - schema: - default: false - type: boolean - style: form - tableManipulationMethod: - description: "replace will remove the rows and columns from the existing table\ - \ and store the new rows and columns, preserving the name and synID. upsert\ - \ will add the new rows to the table and preserve the exisitng rows and columns\ - \ in the existing table." - explode: true - in: query - name: tableManipulationMethod - required: false - schema: - default: replace - enum: - - replace - - upsert - type: string - style: form - annotationKeyStyle: - description: The labeling style for annotation keys. - explode: true - in: query - name: annotationKeyStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - type: string - style: form - tableColumnNameStyle: - description: The labeling syle for table column names. 
- explode: true - in: query - name: tableColumnNameStyle - required: false - schema: - default: class_label - enum: - - class_label - - display_label - - display_name - type: string - style: form - figureType: - description: Figure type to generate. - explode: true - in: query - name: figureType - required: false - schema: - default: component - enum: - - component - - dependency - type: string - style: form - textFormat: - description: Text formatting type. - explode: true - in: query - name: textFormat - required: false - schema: - default: plain - enum: - - plain - - highlighted - type: string - style: form - addAnnotations: - description: 'If true, annotations are added to the manifest' - explode: true - in: query - name: addAnnotations - required: false - schema: - default: false - type: boolean - style: form - manifestTitle: - description: "If making one manifest, the title of the manifest. If making multiple\ - \ manifests, the prefix of the title of the manifests." - explode: true - in: query - name: manifestTitle - required: false - schema: - type: string - style: form - useStrictValidation: - description: "If true, users are blocked from entering incorrect values. If\ - \ false, users will get a warning when using incorrect values." - explode: true - in: query - name: useStrictValidation - required: false - schema: - default: true - type: boolean - style: form - responses: - InternalServerError: - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The request cannot be fulfilled due to an unexpected server error - BadRequest: - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Invalid request - Unauthorized: - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: Unauthorized - NotFound: - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - description: The specified resource was not found - schemas: - BasicError: - description: Problem details (tools.ietf.org/html/rfc7807) - properties: - title: - description: A human readable documentation for the problem type - title: title - type: string - status: - description: The HTTP status code - title: status - type: integer - detail: - description: A human readable explanation specific to this occurrence of - the problem - title: detail - type: string - type: - description: An absolute URI that identifies the problem type - title: type - type: string - required: - - status - - title - title: BasicError - type: object - x-java-class-annotations: - - '@lombok.Builder' - ProjectId: - description: A project ID. - example: syn26251192 - type: string - AssetType: - description: 'Type of asset store, such as Synapse' - enum: - - synapse - example: synapse - title: AssetType - type: string - AssetViewId: - description: An asset view ID. - example: syn23643253 - type: string - DatasetMetadata: - description: The metadata of a dataset. - example: - name: Example dataset - id: Syn1 - properties: - name: - description: The name of the dataset. - example: Example dataset - title: name - type: string - id: - description: The ID of the dataset. - example: Syn1 - title: id - type: string - required: - - id - - name - title: DatasetMetadata - type: object - x-java-class-annotations: - - '@lombok.Builder' - DatasetMetadataArray: - description: An array of dataset metadata. 
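The error responses above all share the BasicError problem-details shape (RFC 7807), so a client can surface title, status, and detail rather than a bare HTTP error. A small sketch against the /schematicVersion endpoint, with a placeholder base URL:

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL

resp = requests.get(f"{BASE_URL}/schematicVersion", timeout=30)
if resp.ok:
    print("schematic version:", resp.json())
elif resp.headers.get("Content-Type", "").startswith("application/problem+json"):
    # BasicError: title and status are required; detail and type are optional.
    problem = resp.json()
    print(f"{problem['status']} {problem['title']}: {problem.get('detail', '')}")
else:
    resp.raise_for_status()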
- example: - datasets: - - name: Example dataset - id: Syn1 - - name: Example dataset - id: Syn1 - properties: - datasets: - description: An array of dataset meatdata. - items: - $ref: '#/components/schemas/DatasetMetadata' - title: datasets - type: array - title: DatasetMetadataArray - type: object - PageMetadata: - description: The metadata of a page. - properties: - number: - description: The page number. - example: 99 - format: int32 - title: number - type: integer - size: - description: The number of items in a single page. - example: 99 - format: int32 - title: size - type: integer - totalElements: - description: Total number of elements in the result set. - example: 99 - format: int64 - title: totalElements - type: integer - totalPages: - description: Total number of pages in the result set. - example: 99 - format: int32 - title: totalPages - type: integer - hasNext: - description: Returns if there is a next page. - example: true - title: hasNext - type: boolean - hasPrevious: - description: Returns if there is a previous page. - example: true - title: hasPrevious - type: boolean - required: - - hasNext - - hasPrevious - - number - - size - - totalElements - - totalPages - title: PageMetadata - type: object - DatasetMetadataPage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/DatasetMetadataPage_allOf' - description: A page of dataset metadata. - title: DatasetMetadataPage - type: object - x-java-class-annotations: - - '@lombok.Builder' - ManifestMetadata: - description: The metadata for a manifest file - example: - name: synapse_storage_manifest.csv - datasetName: dataset_X - datasetId: syn2 - id: syn1 - componentName: patient - properties: - name: - description: The name of the manifest file. - example: synapse_storage_manifest.csv - title: name - type: string - id: - description: The id of the manifest file. - example: syn1 - title: id - type: string - datasetName: - description: The name of the dataset the manifest belongs to. - example: dataset_X - title: datasetName - type: string - datasetId: - description: The id of the dataset the manifest belongs to. - example: syn2 - title: datasetId - type: string - componentName: - description: The name of the component the manifest is of. - example: patient - title: componentName - type: string - required: - - id - - name - title: ManifestMetadata - type: object - ManifestMetadataArray: - description: An array of manifest metadata - example: - manifests: - - name: synapse_storage_manifest.csv - datasetName: dataset_X - datasetId: syn2 - id: syn1 - componentName: patient - - name: synapse_storage_manifest.csv - datasetName: dataset_X - datasetId: syn2 - id: syn1 - componentName: patient - properties: - manifests: - description: A list of manifest metadata - items: - $ref: '#/components/schemas/ManifestMetadata' - title: manifests - type: array - title: ManifestMetadataArray - type: object - ManifestMetadataPage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/ManifestMetadataPage_allOf' - description: A page of manifest metadata - title: ManifestMetadataPage - type: object - x-java-class-annotations: - - '@lombok.Builder' - AssetViewJson: - description: An asset view in json format - type: object - DatasetId: - description: A dataset ID. 
- example: syn23643250 - title: DatasetId - type: string - ManifestJson: - description: A manifest in json format - type: object - ProjectMetadata: - description: The metadata for a project - example: - name: Example project - id: Syn1 - properties: - name: - description: The name of the project. - example: Example project - title: name - type: string - id: - description: The ID of the project. - example: Syn1 - title: id - type: string - required: - - id - - name - title: ProjectMetadata - type: object - x-java-class-annotations: - - '@lombok.Builder' - ProjectMetadataArray: - description: An array of project metadata. - example: - projects: - - name: Example project - id: Syn1 - - name: Example project - id: Syn1 - properties: - projects: - description: An array of project metadata. - items: - $ref: '#/components/schemas/ProjectMetadata' - title: projects - type: array - title: ProjectMetadataArray - type: object - ProjectMetadataPage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/ProjectMetadataPage_allOf' - description: A page of project metadata. - title: ProjectMetadataPage - type: object - x-java-class-annotations: - - '@lombok.Builder' - FileNames: - description: A list of file names. - items: - type: string - type: array - UseFullFilePath: - default: false - description: "Whether or not to return the full path of output, or just the\ - \ basename." - type: boolean - FileMetadata: - description: The metadata for a file - example: - name: file.txt - id: Syn1 - properties: - name: - description: The name of the file. - example: file.txt - title: name - type: string - id: - description: The ID of the file. - example: Syn1 - title: id - type: string - required: - - id - - name - title: FileMetadata - type: object - x-java-class-annotations: - - '@lombok.Builder' - FileMetadataArray: - description: A list of file metadata. - example: - files: - - name: file.txt - id: Syn1 - - name: file.txt - id: Syn1 - properties: - files: - description: A list of file metadata. - items: - $ref: '#/components/schemas/FileMetadata' - title: files - type: array - title: FileMetadataArray - type: object - FileMetadataPage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/FileMetadataPage_allOf' - description: A page of file metadata. - title: FileMetadataPage - type: object - x-java-class-annotations: - - '@lombok.Builder' - ManifestId: - description: A manifest ID. - example: syn51078535 - type: string - NodeLabel: - description: The label of a node in a schema - example: MolecularEntity - type: string - SchemaUrl: - description: The URL of a schema in jsonld form - example: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - type: string - ReturnDisplayNames: - default: true - description: Whether or not to return the display names of the node otherwise - the label - type: boolean - ReturnOrderedBySchema: - default: true - description: "Whether or not to order the components by their order in the schema,\ - \ otherwise random" - type: boolean - Node: - description: A node of a schema. - example: - name: Patient - properties: - name: - description: The name of the node. - example: Patient - title: name - type: string - required: - - name - title: Node - type: object - x-java-class-annotations: - - '@lombok.Builder' - NodeArray: - description: An array of nodes. - example: - nodes: - - name: Patient - - name: Patient - properties: - nodes: - description: An array of nodes. 
- items: - $ref: '#/components/schemas/Node' - title: nodes - type: array - title: NodeArray - type: object - NodePage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/NodePage_allOf' - description: A page of nodes. - title: NodePage - type: object - x-java-class-annotations: - - '@lombok.Builder' - NodeDisplay: - description: The display name of a node in a schema - example: MolecularEntity - type: string - NodePropertyArray: - description: An array of node properties. - example: - node_properties: - - node_properties - - node_properties - properties: - node_properties: - description: An array of node properties. - items: - type: string - title: node_properties - type: array - title: NodePropertyArray - type: object - UseStrictCamelCase: - default: true - description: Whether or not to use the more strict way of converting to camel - case - type: boolean - PropertyLabel: - description: The property label of a node in a schema - example: MolecularEntity - type: string - ValidationRule: - description: A validation rule. - example: - name: list strict - properties: - name: - description: "The name of the rule, along with the arguments for the given\ - \ rule." - example: list strict - title: name - type: string - required: - - name - title: ValidationRule - type: object - x-java-class-annotations: - - '@lombok.Builder' - ValidationRuleArray: - description: An array of validation rules. - example: - validation_rules: - - name: list strict - - name: list strict - properties: - validation_rules: - description: An array of validation rules. - items: - $ref: '#/components/schemas/ValidationRule' - title: validation_rules - type: array - title: ValidationRuleArray - type: object - ComponentLabel: - description: The label of a component in a schema - example: Patient - type: string - ComponentRequirementArray: - description: An array of components - example: - componentRequirementsList: - - componentRequirementsList - - componentRequirementsList - properties: - componentRequirementsList: - items: - type: string - title: componentRequirementsList - type: array - title: ComponentRequirementArray - type: object - ComponentRequirementSubgraph: - description: A pair of components - example: - component1: component1 - component2: component2 - properties: - component1: - description: The display name of the first component in the graph - example: component1 - title: component1 - type: string - component2: - description: The display name of the second component in the graph - example: component2 - title: component2 - type: string - required: - - component1 - - component2 - title: ComponentRequirementSubgraph - type: object - x-java-class-annotations: - - '@lombok.Builder' - ComponentRequirementGraph: - description: A graph of components - example: - componentRequirementsGraph: - - component1: component1 - component2: component2 - - component1: component1 - component2: component2 - properties: - componentRequirementsGraph: - items: - $ref: '#/components/schemas/ComponentRequirementSubgraph' - title: componentRequirementsGraph - type: array - title: ComponentRequirementGraph - type: object - RelationshipType: - description: A type of schema relationship - example: requiresDependency - type: string - ConnectedNodePair: - description: A pair of conncted nodes - example: - node2: Node2 - node1: Node1 - properties: - node1: - description: The disaplay name of the first node. 
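The ValidationRuleArray schema above is what the /nodes/{nodeDisplay}/validationRules endpoint defined earlier returns. A sketch that prints each rule name (for example "list strict") for the MolecularEntity display name used as an example in this spec, assuming the same placeholder base URL:

import requests

BASE_URL = "http://localhost:7080/api/v1"  # placeholder deployment URL
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

resp = requests.get(
    f"{BASE_URL}/nodes/MolecularEntity/validationRules",
    params={"schemaUrl": SCHEMA_URL},
    timeout=30,
)
resp.raise_for_status()

# ValidationRuleArray -> {"validation_rules": [{"name": "list strict"}, ...]}
for rule in resp.json()["validation_rules"]:
    print(rule["name"])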
- example: Node1 - title: node1 - type: string - node2: - description: The display name of the second node. - example: Node2 - title: node2 - type: string - required: - - node1 - - node2 - title: ConnectedNodePair - type: object - x-java-class-annotations: - - '@lombok.Builder' - ConnectedNodePairArray: - description: An array of conncted node pairs - example: - connectedNodes: - - node2: Node2 - node1: Node1 - - node2: Node2 - node1: Node1 - properties: - connectedNodes: - description: An array of conncted node pairs. - items: - $ref: '#/components/schemas/ConnectedNodePair' - title: connectedNodes - type: array - title: ConnectedNodePairArray - type: object - ConnectedNodePairPage: - allOf: - - $ref: '#/components/schemas/PageMetadata' - - $ref: '#/components/schemas/ConnectedNodePairPage_allOf' - description: A page of conncted node pairs - title: ConnectedNodePairPage - type: object - x-java-class-annotations: - - '@lombok.Builder' - RestrictRules: - default: false - description: "If True, validation suite will only run with in-house validation\ - \ rule. If False, the Great Expectations suite will be utilized and all rules\ - \ will be available." - type: boolean - ManifestValidationResult: - description: The results of manifest validation - example: - warnings: - - warnings - - warnings - errors: - - errors - - errors - properties: - errors: - description: Any errors from validation - items: - type: string - title: errors - type: array - warnings: - description: Any warnings from validation - items: - type: string - title: warnings - type: array - title: ManifestValidationResult - type: object - TangledTreeLayers: - description: Tangled tree node layers to display for a given data model - type: string - TangledTreeText: - description: Tangled tree plain or higlighted text to display for a given data - model - type: object - DatasetIdArray: - description: An array of dataset ids - items: - $ref: '#/components/schemas/DatasetId' - type: array - DataType: - description: A data type - example: Patient - title: DataType - type: string - DataTypeArray: - description: An array of data types - items: - $ref: '#/components/schemas/DataType' - type: array - GoogleSheetLinks: - description: An array of google sheet links - example: - links: - - links - - links - properties: - links: - items: - type: string - title: links - type: array - title: GoogleSheetLinks - type: object - DatasetMetadataPage_allOf: - properties: - datasets: - description: An array of dataset meatdata. - items: - $ref: '#/components/schemas/DatasetMetadata' - title: datasets - type: array - required: - - datasets - title: DatasetMetadataPage_allOf - type: object - example: null - ManifestMetadataPage_allOf: - properties: - manifests: - description: A list of manifest metadata - items: - $ref: '#/components/schemas/ManifestMetadata' - title: manifests - type: array - required: - - manifests - title: ManifestMetadataPage_allOf - type: object - example: null - ProjectMetadataPage_allOf: - properties: - projects: - description: An array of project metadata. - items: - $ref: '#/components/schemas/ProjectMetadata' - title: projects - type: array - required: - - projects - title: ProjectMetadataPage_allOf - type: object - example: null - FileMetadataPage_allOf: - properties: - files: - description: A list of file metadata. 
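The various `*Page` schemas in this spec compose `PageMetadata` with an item array via `allOf`, and the deleted tests assert the same six paging fields throughout (`number`, `size`, `totalElements`, `totalPages`, `hasNext`, `hasPrevious`). A self-contained sketch of how such a page envelope can be derived from an item list; the class and field names are illustrative, not the generated models, but the numbers match the deleted `Page` tests:

    import math
    from dataclasses import dataclass, field
    from typing import Any, List

    @dataclass
    class PageSketch:
        """Illustrative page envelope carrying the PageMetadata fields."""
        items: List[Any]
        page_number: int = 1            # pages are 1-based in these endpoints
        page_max_items: int = 100_000   # default page size asserted by the endpoint tests
        total_elements: int = field(init=False)
        total_pages: int = field(init=False)
        has_next: bool = field(init=False)
        has_previous: bool = field(init=False)

        def __post_init__(self) -> None:
            self.total_elements = len(self.items)
            self.total_pages = math.ceil(self.total_elements / self.page_max_items)
            self.has_next = self.page_number < self.total_pages
            self.has_previous = self.page_number > 1
            # Keep only the slice of items belonging to this page.
            start = (self.page_number - 1) * self.page_max_items
            self.items = self.items[start : start + self.page_max_items]

    page = PageSketch(items=list(range(1, 22)), page_max_items=5, page_number=2)
    assert page.items == [6, 7, 8, 9, 10]
    assert (page.total_pages, page.has_next, page.has_previous) == (5, True, True)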
- items: - $ref: '#/components/schemas/FileMetadata' - title: files - type: array - required: - - files - title: FileMetadataPage_allOf - type: object - example: null - NodePage_allOf: - properties: - nodes: - description: An array of nodes. - items: - $ref: '#/components/schemas/Node' - title: nodes - type: array - required: - - nodes - title: NodePage_allOf - type: object - example: null - ConnectedNodePairPage_allOf: - properties: - connectedNodes: - description: An array of conncted node pairs. - items: - $ref: '#/components/schemas/ConnectedNodePair' - title: connectedNodes - type: array - required: - - connectedNodes - title: ConnectedNodePairPage_allOf - type: object - example: null - securitySchemes: - bearerAuth: - bearerFormat: JWT - scheme: bearer - type: http - x-bearerInfoFunc: schematic_api.controllers.security_controller_.info_from_bearerAuth diff --git a/apps/schematic/api/schematic_api/test/__init__.py b/apps/schematic/api/schematic_api/test/__init__.py deleted file mode 100644 index 77e67a395..000000000 --- a/apps/schematic/api/schematic_api/test/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -import logging - -import connexion -from flask_testing import TestCase - -from schematic_api.encoder import JSONEncoder - - -class BaseTestCase(TestCase): - def create_app(self): - logging.getLogger("connexion.operation").setLevel("ERROR") - app = connexion.App(__name__, specification_dir="../openapi/") - app.app.json_encoder = JSONEncoder - app.add_api("openapi.yaml", pythonic_params=True) - return app.app diff --git a/apps/schematic/api/schematic_api/test/conftest.py b/apps/schematic/api/schematic_api/test/conftest.py deleted file mode 100644 index ea6d5f670..000000000 --- a/apps/schematic/api/schematic_api/test/conftest.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Globals and pytest fixtures for testing""" - -from typing import Generator -import pytest - -import pandas as pd - -from schematic_api.models.manifest_metadata import ManifestMetadata - -# testing functions ------------------------------------------------------------------------------- - - -def csv_to_bytes(path: str) -> str: - """reads in a csv file and returns as bytes""" - dataframe = pd.read_csv(path) - csv_string = dataframe.to_csv(lineterminator="\r\n", index=False) - return bytes(csv_string, encoding="utf-8") - - -def csv_to_json_str(path: str) -> str: - """reads in a csv file and returns as json string""" - dataframe = pd.read_csv(path) - return dataframe.to_json() - - -# strings for mocking ----------------------------------------------------------------------------- - -GET_ACCESS_TOKEN_MOCK = ( - "schematic_api.controllers.manifest_generation_controller_impl.get_access_token" -) -CREATE_MANIFESTS_MOCK = ( - "schematic.manifest.generator.ManifestGenerator.create_manifests" -) - - -EXAMPLE_MANIFEST_METADATA = [ - ManifestMetadata( - name="name", - id="id", - dataset_name="name", - dataset_id="id", - component_name="name", - ), - ManifestMetadata( - name="name", - id="id", - dataset_name="name", - dataset_id="id", - component_name="name", - ), -] - -MANIFEST_METADATA_KEYS = [ - "componentName", - "datasetId", - "datasetName", - "id", - "name", -] - -PAGING_KEYS = [ - "hasNext", - "hasPrevious", - "number", - "size", - "totalElements", - "totalPages", -] - - -@pytest.fixture(scope="session", name="example_manifest_metadata") -def fixture_example_manifest_metadata() -> Generator: - """ - Yields an example of a list of manifest metadata - """ - yield EXAMPLE_MANIFEST_METADATA - - -TEST_SCHEMA_URL = 
"https://raw.githubusercontent.com/Sage-Bionetworks/schematic/main/tests/data/example.model.jsonld" # pylint: disable=line-too-long - - -@pytest.fixture(scope="session", name="test_schema_url") -def fixture_test_schema_url() -> Generator: - """ - Yields an the URL of the test schema - """ - yield TEST_SCHEMA_URL - - -CORRECT_MANIFEST_PATH = "schematic_api/test/data/manifests/biospecimen.csv" - - -@pytest.fixture(scope="session", name="correct_manifest_path") -def fixture_correct_manifest() -> Generator: - """ - Yields the path to biospecimen manifest csv - """ - yield CORRECT_MANIFEST_PATH - - -INCORRECT_MANIFEST_PATH = "schematic_api/test/data/manifests/biospecimen_incorrect.csv" - - -@pytest.fixture(scope="session", name="incorrect_manifest_path") -def fixture_incorrect_manifest_dataframe() -> Generator: - """ - Yields the path to biospecimen manifest csv - """ - yield INCORRECT_MANIFEST_PATH diff --git a/apps/schematic/api/schematic_api/test/data/manifests/biospecimen.csv b/apps/schematic/api/schematic_api/test/data/manifests/biospecimen.csv deleted file mode 100644 index b8197a6ce..000000000 --- a/apps/schematic/api/schematic_api/test/data/manifests/biospecimen.csv +++ /dev/null @@ -1,2 +0,0 @@ -Sample ID,Patient ID,Tissue Status,Component -123,1,Healthy,Biospecimen \ No newline at end of file diff --git a/apps/schematic/api/schematic_api/test/data/manifests/biospecimen_incorrect.csv b/apps/schematic/api/schematic_api/test/data/manifests/biospecimen_incorrect.csv deleted file mode 100644 index 1eb6c35d3..000000000 --- a/apps/schematic/api/schematic_api/test/data/manifests/biospecimen_incorrect.csv +++ /dev/null @@ -1,2 +0,0 @@ -Sample ID,Patient ID,Tissue Status,Component -123,1,not_a_valid_status,Biospecimen \ No newline at end of file diff --git a/apps/schematic/api/schematic_api/test/data/synapse_config_example.yaml b/apps/schematic/api/schematic_api/test/data/synapse_config_example.yaml deleted file mode 100644 index e4eab7ee6..000000000 --- a/apps/schematic/api/schematic_api/test/data/synapse_config_example.yaml +++ /dev/null @@ -1,5 +0,0 @@ -synapse_token: xxx -test_project: syn53072007 -test_dataset: syn53072009 -test_manifest: syn53072014 -test_asset_view: syn53072011 diff --git a/apps/schematic/api/schematic_api/test/test_manifest_generation_controller_impl.py b/apps/schematic/api/schematic_api/test/test_manifest_generation_controller_impl.py deleted file mode 100644 index 481538c09..000000000 --- a/apps/schematic/api/schematic_api/test/test_manifest_generation_controller_impl.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Tests for manifest generation endpoint functions""" - -# pylint: disable=no-member - -from unittest.mock import patch - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.google_sheet_links import GoogleSheetLinks -from schematic_api.controllers.manifest_generation_controller_impl import ( - generate_excel_manifest_file, - generate_excel_manifest, - generate_google_sheet_manifests, -) -from .conftest import GET_ACCESS_TOKEN_MOCK, CREATE_MANIFESTS_MOCK - - -class TestGenerateExcelManifestFile: - """Tests generate_excel_manifest_file""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - with patch(GET_ACCESS_TOKEN_MOCK): - with patch(CREATE_MANIFESTS_MOCK, return_value=["path1"]): - result, status = generate_excel_manifest_file( - schema_url=test_schema_url, - dataset_id="syn2", - asset_view_id="syn1", - data_type="syn4", - add_annotations=False, - manifest_title="title", - 
display_label_type="class_label", - ) - assert status == 200 - assert result == "path1" - - def test_error(self, test_schema_url: str) -> None: - """Test for successful result""" - with patch(GET_ACCESS_TOKEN_MOCK): - with patch(CREATE_MANIFESTS_MOCK, side_effect=TypeError): - output = generate_excel_manifest_file( - schema_url=test_schema_url, - dataset_id="syn2", - asset_view_id="syn1", - data_type="syn4", - add_annotations=False, - manifest_title="title", - use_strict_validation=True, - display_label_type="class_label", - ) - assert isinstance(output, tuple) - result, status = output - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGenerateExcelManifest: # pylint: disable=too-few-public-methods - """Tests generate_excel_manifest""" - - def test_error(self, test_schema_url: str) -> None: - """Test for successful result""" - with patch(GET_ACCESS_TOKEN_MOCK): - with patch(CREATE_MANIFESTS_MOCK, side_effect=TypeError): - output = generate_excel_manifest( - schema_url=test_schema_url, - dataset_id="syn2", - asset_view_id="syn1", - data_type="syn4", - add_annotations=False, - manifest_title="title", - display_label_type="class_label", - ) - assert isinstance(output, tuple) - result, status = output - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGenerateGoogleSheetManifests: - """Tests generate_google_sheet_manifests""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - with patch(GET_ACCESS_TOKEN_MOCK): - with patch(CREATE_MANIFESTS_MOCK, return_value=["link1", "link2"]): - result, status = generate_google_sheet_manifests( - schema_url=test_schema_url, - dataset_id_array=["syn2", "syn3"], - asset_view_id="syn1", - data_type_array=["syn4", "syn5"], - add_annotations=False, - manifest_title="title", - use_strict_validation=True, - generate_all_manifests=False, - display_label_type="class_label", - ) - assert status == 200 - assert isinstance(result, GoogleSheetLinks) - assert result.links == ["link1", "link2"] - - def test_error_statuses(self) -> None: - """Test for error statuses""" - with patch(GET_ACCESS_TOKEN_MOCK): - with patch(CREATE_MANIFESTS_MOCK): - result, status = generate_google_sheet_manifests( - schema_url="not_a_url", - dataset_id_array=["syn2", "syn3"], - asset_view_id="syn1", - data_type_array=["syn4", "syn5"], - add_annotations=False, - manifest_title="title", - use_strict_validation=True, - generate_all_manifests=False, - display_label_type="class_label", - ) - assert status == 404 - assert isinstance(result, BasicError) diff --git a/apps/schematic/api/schematic_api/test/test_manifest_generation_endpoints.py b/apps/schematic/api/schematic_api/test/test_manifest_generation_endpoints.py deleted file mode 100644 index 0261e64c0..000000000 --- a/apps/schematic/api/schematic_api/test/test_manifest_generation_endpoints.py +++ /dev/null @@ -1,100 +0,0 @@ -"""Tests for manifest generation endpoint functions""" - -# pylint: disable=no-member - -from unittest.mock import patch - -from schematic_api.test import BaseTestCase - -from .conftest import ( - CREATE_MANIFESTS_MOCK, - TEST_SCHEMA_URL, -) - -GENERATE_GOOGLE_SHEET_MANIFESTS_URL = ( - f"/api/v1/generateGoogleSheetManifests?schemaUrl={TEST_SCHEMA_URL}" -) - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - - -class TestGenerateGoogleSheetManifests(BaseTestCase): - """Tests google sheet manifest endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - with 
patch(CREATE_MANIFESTS_MOCK, return_value=["l1"]) as mock_method: # type: ignore - url = ( - f"{GENERATE_GOOGLE_SHEET_MANIFESTS_URL}" - "&datasetIdArray=syn2" - "&dataTypeArray=node_label" - "&assetViewId=syn1" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["links"] - assert result["links"] == ["l1"] - call_args = mock_method.call_args.kwargs - assert call_args["output_format"] == "google_sheet" - assert call_args["data_types"] == ["node_label"] - assert not call_args["title"] - assert call_args["dataset_ids"] == ["syn2"] - assert call_args["strict"] - assert not call_args["use_annotations"] - assert call_args["data_model_labels"] == "class_label" - - def test_arguments(self) -> None: - """Test for correct arguments""" - with patch(CREATE_MANIFESTS_MOCK, return_value=["l1"]) as mock_method: # type: ignore - url = ( - f"{GENERATE_GOOGLE_SHEET_MANIFESTS_URL}" - "&dataTypeArray=data_type1" - "&dataTypeArray=data_type2" - "&datasetIdArray=syn2" - "&datasetIdArray=syn3" - "&manifestTitle=title" - "&useStrictValidation=false" - "&addAnnotations=true" - "&displayLabelType=display_label" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["links"] - assert result["links"] == ["l1"] - call_args = mock_method.call_args.kwargs - assert call_args["output_format"] == "google_sheet" - assert call_args["data_types"] == ["data_type1", "data_type2"] - assert call_args["dataset_ids"] == ["syn2", "syn3"] - assert call_args["title"] == "title" - assert not call_args["strict"] - assert call_args["use_annotations"] - assert call_args["data_model_labels"] == "display_label" - - url = f"{GENERATE_GOOGLE_SHEET_MANIFESTS_URL}&generateAllManifests=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["links"] - assert result["links"] == ["l1"] - call_args = mock_method.call_args.kwargs - assert call_args["output_format"] == "google_sheet" - assert call_args["data_types"] == ["all manifests"] - assert not call_args["title"] - assert not call_args["dataset_ids"] - assert call_args["strict"] - assert not call_args["use_annotations"] - assert call_args["data_model_labels"] == "class_label" diff --git a/apps/schematic/api/schematic_api/test/test_manifest_validation_controller_impl.py b/apps/schematic/api/schematic_api/test/test_manifest_validation_controller_impl.py deleted file mode 100644 index 749841651..000000000 --- a/apps/schematic/api/schematic_api/test/test_manifest_validation_controller_impl.py +++ /dev/null @@ -1,232 +0,0 @@ -"""Tests for validation endpoint functions""" - -from unittest.mock import patch - -from schematic_api.models.basic_error import BasicError -import schematic_api.controllers.manifest_validation_controller_impl -from schematic_api.models.manifest_validation_result import ManifestValidationResult -from schematic_api.controllers.manifest_validation_controller_impl import ( - submit_manifest_json, - submit_manifest_csv, - validate_manifest_csv, - validate_manifest_json, -) -from .conftest import 
csv_to_bytes, csv_to_json_str - - -class TestSubmitManifestCsv: - """Tests submit_manifest_csv""" - - def test_success(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ): - body = csv_to_bytes(correct_manifest_path) - result, status = submit_manifest_csv( - schema_url=test_schema_url, - component="Biospecimen", - dataset_id="syn2", - asset_view_id="syn3", - body=body, - ) - - assert status == 200 - assert result == "syn1" - - def test_500(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - side_effect=TypeError, - ): - body = csv_to_bytes(correct_manifest_path) - result, status = submit_manifest_csv( - schema_url=test_schema_url, - component="Biospecimen", - dataset_id="syn2", - asset_view_id="syn3", - body=body, - ) - - assert status == 500 - assert isinstance(result, BasicError) - - -class TestSubmitManifestJson: - """Tests submit_manifest_""" - - def test_success(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ): - body = csv_to_json_str(correct_manifest_path) - result, status = submit_manifest_json( - schema_url=test_schema_url, - component="Biospecimen", - dataset_id="syn2", - asset_view_id="syn3", - body=body, - ) - - assert status == 200 - assert result == "syn1" - - def test_500(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - side_effect=TypeError, - ): - body = csv_to_json_str(correct_manifest_path) - result, status = submit_manifest_json( - schema_url=test_schema_url, - component="Biospecimen", - dataset_id="syn2", - asset_view_id="syn3", - body=body, - ) - - assert status == 500 - assert isinstance(result, BasicError) - - -class TestValidateManifestCsv: - """Tests validate_manifest_csv""" - - def test_success_no_errors( - self, correct_manifest_path: str, test_schema_url: str - ) -> None: - """Test for successful result with no validation errors""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "get_access_token", - return_value=None, - ): - body = csv_to_bytes(correct_manifest_path) - result, status = validate_manifest_csv( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - assert status == 200 - assert isinstance(result, ManifestValidationResult) - assert result.errors == [] - assert result.warnings == [] - - def test_success_with_one_error( - self, - incorrect_manifest_path: str, - test_schema_url: str, - ) -> None: - """Test for successful result with one validation error""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "get_access_token", - return_value=None, - ): - body = csv_to_bytes(incorrect_manifest_path) - result, status = validate_manifest_csv( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - assert status == 200 - assert isinstance(result, 
ManifestValidationResult) - assert isinstance(result.errors, list) - assert len(result.errors) > 0 - assert result.warnings == [] - - def test_500(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "save_manifest_csv_string_as_csv", - side_effect=TypeError, - ): - body = csv_to_bytes(correct_manifest_path) - result, status = validate_manifest_csv( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - - assert status == 500 - assert isinstance(result, BasicError) - - -class TestValidateManifestJson: - """Tests validate_manifest_json""" - - def test_success_no_errors( - self, correct_manifest_path: str, test_schema_url: str - ) -> None: - """Test for successful result with no validation errors""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "get_access_token", - return_value=None, - ): - body = csv_to_json_str(correct_manifest_path) - result, status = validate_manifest_json( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - assert status == 200 - assert isinstance(result, ManifestValidationResult) - assert result.errors == [] - assert result.warnings == [] - - def test_success_one_error( - self, - incorrect_manifest_path: str, - test_schema_url: str, - ) -> None: - """Test for successful result with one validation error""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "get_access_token", - return_value=None, - ): - body = csv_to_json_str(incorrect_manifest_path) - result, status = validate_manifest_json( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - assert status == 200 - assert isinstance(result, ManifestValidationResult) - assert isinstance(result.errors, list) - assert len(result.errors) > 0 - assert result.warnings == [] - - def test_500(self, correct_manifest_path: str, test_schema_url: str) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "save_manifest_json_string_as_csv", - side_effect=TypeError, - ): - body = csv_to_json_str(correct_manifest_path) - result, status = validate_manifest_json( - schema_url=test_schema_url, - component_label="Biospecimen", - body=body, - restrict_rules=False, - ) - - assert status == 500 - assert isinstance(result, BasicError) diff --git a/apps/schematic/api/schematic_api/test/test_manifest_validation_endpoints.py b/apps/schematic/api/schematic_api/test/test_manifest_validation_endpoints.py deleted file mode 100644 index 580ed0098..000000000 --- a/apps/schematic/api/schematic_api/test/test_manifest_validation_endpoints.py +++ /dev/null @@ -1,283 +0,0 @@ -"""Tests for validation endpoints""" - -from unittest.mock import patch - -from schematic_api.test import BaseTestCase -import schematic_api.controllers.manifest_validation_controller_impl -from .conftest import ( - TEST_SCHEMA_URL, - CORRECT_MANIFEST_PATH, - csv_to_bytes, - csv_to_json_str, -) - -TEMP_MANIFEST_PATH = "/tmp/manifest.csv" -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - -SUBMIT_MANIFEST_CSV_URL = ( - f"/api/v1/submitManifestCsv?schemaUrl={TEST_SCHEMA_URL}" - "&component=component&datasetId=syn1&assetViewId=syn2" -) -SUBMIT_MANIFEST_JSON_URL = ( - f"/api/v1/submitManifestJson?schemaUrl={TEST_SCHEMA_URL}" - 
"&component=component&datasetId=syn1&assetViewId=syn2" -) -VALIDATE_MANIFEST_CSV_URL = "/api/v1/validateManifestCsv" -VALIDATE_MANIFEST_JSON_URL = "/api/v1/validateManifestJson" - - -class TestSubmitManifestCsv(BaseTestCase): - """Tests for submit manifest csv endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ): - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - response = self.client.open( - SUBMIT_MANIFEST_CSV_URL, method="POST", headers=HEADERS, data=body - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - assert response.json == "syn1" - - def test_argument_defaults(self) -> None: - """Test optional arguments""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ) as mock_function: - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - - self.client.open( - SUBMIT_MANIFEST_CSV_URL, method="POST", headers=HEADERS, data=body - ) - arguments_dict = mock_function.call_args[1] - assert not arguments_dict["restrict_rules"] - assert not arguments_dict["hide_blanks"] - assert arguments_dict["display_label_type"] == "class_label" - assert arguments_dict["storage_method"] == "table_file_and_entities" - assert arguments_dict["table_manipulation_method"] == "replace" - - def test_arguments(self) -> None: - """Test optional arguments""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ) as mock_function: - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - url = ( - f"{SUBMIT_MANIFEST_CSV_URL}" - "&restrictRules=true&" - "hideBlanks=true" - "&displayLabelType=display_label" - "&storageMethod=file_only" - "&tableManipulationMethod=upsert" - ) - self.client.open(url, method="POST", headers=HEADERS, data=body) - arguments_dict = mock_function.call_args[1] - assert arguments_dict["restrict_rules"] - assert arguments_dict["hide_blanks"] - assert arguments_dict["display_label_type"] == "display_label" - assert arguments_dict["storage_method"] == "file_only" - assert arguments_dict["table_manipulation_method"] == "upsert" - - def test_500(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - side_effect=TypeError, - ): - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - response = self.client.open( - SUBMIT_MANIFEST_CSV_URL, method="POST", headers=HEADERS, data=body - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestSubmitManifestJson(BaseTestCase): - """Tests for submit manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ): - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - response = self.client.open( - SUBMIT_MANIFEST_JSON_URL, method="POST", headers=HEADERS, data=body - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - assert response.json == "syn1" - - def test_argument_defaults(self) -> None: - """Test optional arguments""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - 
"submit_manifest_with_schematic", - return_value="syn1", - ) as mock_function: - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - - self.client.open( - SUBMIT_MANIFEST_JSON_URL, method="POST", headers=HEADERS, data=body - ) - arguments_dict = mock_function.call_args[1] - assert not arguments_dict["restrict_rules"] - assert not arguments_dict["hide_blanks"] - assert arguments_dict["display_label_type"] == "class_label" - assert arguments_dict["storage_method"] == "table_file_and_entities" - assert arguments_dict["table_manipulation_method"] == "replace" - - def test_arguments(self) -> None: - """Test optional arguments""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - return_value="syn1", - ) as mock_function: - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - url = ( - f"{SUBMIT_MANIFEST_JSON_URL}" - "&restrictRules=true" - "&hideBlanks=true" - "&displayLabelType=display_label" - "&storageMethod=file_only" - "&tableManipulationMethod=upsert" - ) - self.client.open(url, method="POST", headers=HEADERS, data=body) - arguments_dict = mock_function.call_args[1] - - assert arguments_dict["restrict_rules"] - assert arguments_dict["hide_blanks"] - assert arguments_dict["display_label_type"] == "display_label" - assert arguments_dict["storage_method"] == "file_only" - assert arguments_dict["table_manipulation_method"] == "upsert" - - def test_500(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "submit_manifest_with_schematic", - side_effect=TypeError, - ): - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - response = self.client.open( - SUBMIT_MANIFEST_JSON_URL, method="POST", headers=HEADERS, data=body - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestValidateManifestCsv(BaseTestCase): - """Tests for validate manifest csv endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - url = f"{VALIDATE_MANIFEST_CSV_URL}?schemaUrl={TEST_SCHEMA_URL}&componentLabel=Biospecimen" - response = self.client.open(url, method="POST", headers=HEADERS, data=body) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert response.json["errors"] == [] - assert response.json["warnings"] == [] - - def test_arguments(self) -> None: - """Test for the restrict rules argument""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "validate_manifest_with_schematic", - return_value=([], []), - ) as mock_function: - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - - url1 = f"{VALIDATE_MANIFEST_CSV_URL}?schemaUrl=x&componentLabel=x&restrictRules=true" - self.client.open(url1, method="POST", headers=HEADERS, data=body) - mock_function.assert_called_with( - TEMP_MANIFEST_PATH, "x", "x", True, display_label_type="class_label" - ) - - url2 = f"{VALIDATE_MANIFEST_CSV_URL}?schemaUrl=x2&componentLabel=x2&restrictRules=false" - self.client.open(url2, method="POST", headers=HEADERS, data=body) - mock_function.assert_called_with( - TEMP_MANIFEST_PATH, "x2", "x2", False, display_label_type="class_label" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "save_manifest_csv_string_as_csv", - side_effect=TypeError, - ): - url = f"{VALIDATE_MANIFEST_CSV_URL}?schemaUrl=xxx&componentLabel=xxx" 
- response = self.client.open(url, method="POST", headers=HEADERS) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestValidateManifestJson(BaseTestCase): - """Tests for validate manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - url = f"{VALIDATE_MANIFEST_JSON_URL}?schemaUrl={TEST_SCHEMA_URL}&componentLabel=Biospecimen" - response = self.client.open(url, method="POST", headers=HEADERS, data=body) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert response.json["errors"] == [] - assert response.json["warnings"] == [] - - def test_restrict_rules(self) -> None: - """Test for the restrict rules argument""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "validate_manifest_with_schematic", - return_value=([], []), - ) as mock_function: - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - - url1 = f"{VALIDATE_MANIFEST_JSON_URL}?schemaUrl=x&componentLabel=x&restrictRules=true" - self.client.open(url1, method="POST", headers=HEADERS, data=body) - mock_function.assert_called_with( - TEMP_MANIFEST_PATH, "x", "x", True, display_label_type="class_label" - ) - - url2 = ( - f"{VALIDATE_MANIFEST_JSON_URL}" - "?schemaUrl=x2&componentLabel=x2&restrictRules=false" - ) - self.client.open(url2, method="POST", headers=HEADERS, data=body) - mock_function.assert_called_with( - TEMP_MANIFEST_PATH, "x2", "x2", False, display_label_type="class_label" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.manifest_validation_controller_impl, - "save_manifest_json_string_as_csv", - side_effect=TypeError, - ): - url = f"{VALIDATE_MANIFEST_JSON_URL}?schemaUrl=xxx&componentLabel=xxx" - response = self.client.open(url, method="POST", headers=HEADERS) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) diff --git a/apps/schematic/api/schematic_api/test/test_paging.py b/apps/schematic/api/schematic_api/test/test_paging.py deleted file mode 100644 index b9d70847a..000000000 --- a/apps/schematic/api/schematic_api/test/test_paging.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Testing for pagination utilities""" - -import pytest - -from schematic_api.controllers.paging import ( - get_item_slice, - get_page_amount, - get_page_indeces, - Page, -) - - -class TestPage: - """Tests for Page class""" - - def test_page1(self) -> None: - """Tests for Page class""" - items = list(range(1, 22)) - page = Page(items, page_max_items=5, page_number=1) - assert page.page_number == 1 - assert page.page_max_items == 5 - assert page.total_items == 21 - assert page.total_pages == 5 - assert page.has_next - assert not page.has_previous - assert page.items == [1, 2, 3, 4, 5] - - def test_page2(self) -> None: - """Tests for Page class""" - items = list(range(1, 22)) - page = Page(items, page_max_items=5, page_number=2) - assert page.page_number == 2 - assert page.page_max_items == 5 - assert page.total_items == 21 - assert page.total_pages == 5 - assert page.has_next - assert page.has_previous - assert page.items == [6, 7, 8, 9, 10] - - def test_page3(self) -> None: - """Tests for Page class""" - items = list(range(1, 22)) - page = Page(items, page_max_items=5, page_number=5) - assert page.page_number == 5 - assert page.page_max_items == 5 - assert page.total_items == 21 - assert page.total_pages == 5 - assert not page.has_next - assert 
page.has_previous - assert page.items == [21] - - -class TestPagingUtils: - """Tests for various paging utils""" - - def test_get_page_amount(self) -> None: - """Tests for get_page_amount""" - assert get_page_amount(1, 1) == 1 - assert get_page_amount(2, 1) == 2 - assert get_page_amount(11, 10) == 2 - assert get_page_amount(0, 1) == 0 - with pytest.raises( - ValueError, match="('total_items must be 0 or greater: ', -1)" - ): - get_page_amount(-1, 1) - with pytest.raises( - ValueError, match="('page_max_items must be 1 or greater: ', 0)" - ): - get_page_amount(0, 0) - - def test_get_item_slice(self) -> None: - """Tests for get_item_slice""" - lst = list(range(1, 8)) - assert get_item_slice(items=lst, page_max_items=1, page_number=1) == [1] - assert get_item_slice(items=lst, page_max_items=1, page_number=2) == [2] - assert get_item_slice(items=lst, page_max_items=1, page_number=7) == [7] - assert get_item_slice(items=lst, page_max_items=1, page_number=8) == [] - assert get_item_slice(items=lst, page_max_items=1, page_number=2) == [2] - assert get_item_slice(items=lst, page_max_items=3, page_number=1) == [1, 2, 3] - assert get_item_slice(items=lst, page_max_items=3, page_number=2) == [4, 5, 6] - assert get_item_slice(items=lst, page_max_items=3, page_number=3) == [7] - - def test_get_page_indeces(self) -> None: - """Tests for get_page_indeces""" - assert get_page_indeces(total_items=21, page_max_items=10, page_number=1) == ( - 0, - 10, - ) - assert get_page_indeces(total_items=21, page_max_items=10, page_number=2) == ( - 10, - 20, - ) - assert get_page_indeces(total_items=21, page_max_items=10, page_number=3) == ( - 20, - 21, - ) - with pytest.raises( - ValueError, match="('total_items must be 0 or greater: ', -1)" - ): - get_page_indeces(-1, 1, 1) - with pytest.raises( - ValueError, match="('page_max_items must be 1 or greater: ', 0)" - ): - get_page_indeces(0, 0, 1) - with pytest.raises( - ValueError, match="('page_number must be 1 or greater: ', 0)" - ): - get_page_indeces(0, 1, 0) diff --git a/apps/schematic/api/schematic_api/test/test_schema_controller_endpoints.py b/apps/schematic/api/schematic_api/test/test_schema_controller_endpoints.py deleted file mode 100644 index 02fed6585..000000000 --- a/apps/schematic/api/schematic_api/test/test_schema_controller_endpoints.py +++ /dev/null @@ -1,332 +0,0 @@ -"""Tests for schema endpoints""" - -# pylint: disable=duplicate-code -import unittest - -from schematic_api.test import BaseTestCase -from .conftest import TEST_SCHEMA_URL, PAGING_KEYS - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - -COMPONENT_URL = "/api/v1/components/Patient/?schemaUrl=" -COMPONENT_REQUIREMENTS_ARRAY_URL = ( - "/api/v1/components/Biospecimen/requirementsArray?schemaUrl=" -) -COMPONENT_REQUIREMENTS_GRAPH_URL = ( - "/api/v1/components/Biospecimen/requirementsGraph?schemaUrl=" -) -CONNECTED_NODE_PAIR_ARRAY_URL = "/api/v1/connectedNodePairArray?schemaUrl=" -CONNECTED_NODE_PAIR_PAGE_URL = "/api/v1/connectedNodePairPage?schemaUrl=" -NODE_IS_REQUIRED_URL = "/api/v1/nodes/FamilyHistory/isRequired?schemaUrl=" -PROPERTY_LABEL_URL = "/api/v1/nodes/node_label/propertyLabel?schemaUrl=" -SCHEMA_ATTRIBUTES_URL = "/api/v1/schemaAttributes?schemaUrl=" -NODE_PROPERTIES_URL = "/api/v1/nodes/Patient/nodeProperties?schemaUrl=" -NODE_VALIDATION_RULES_URL = "/api/v1/nodes/CheckRegexList/validationRules?schemaUrl=" -NODE_DEPENDENCY_ARRAY_URL = "/api/v1/nodes/Patient/dependencyArray?schemaUrl=" -NODE_DEPENDENCY_PAGE_URL = 
"/api/v1/nodes/Patient/dependencyPage?schemaUrl=" - - -class TestGetComponent(BaseTestCase): - """Test case for component endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{COMPONENT_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - - def test_parameters(self) -> None: - """Test for successful result""" - url = f"{COMPONENT_URL}{TEST_SCHEMA_URL}&includeIndex=True" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - - def test_404(self) -> None: - """Test for 404 result""" - url = f"{COMPONENT_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert404(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetComponentRequirementsArray(BaseTestCase): - """Test case for component requirements array endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{COMPONENT_REQUIREMENTS_ARRAY_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, list) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{COMPONENT_REQUIREMENTS_ARRAY_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetComponentRequirementsGraph(BaseTestCase): - """Test case for component requirements graph endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{COMPONENT_REQUIREMENTS_GRAPH_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, list) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{COMPONENT_REQUIREMENTS_GRAPH_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetConnectedNodePairArray(BaseTestCase): - """Tests for connected node pair array endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = ( - f"{CONNECTED_NODE_PAIR_ARRAY_URL}{TEST_SCHEMA_URL}" - "&relationshipType=requiresDependency" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert isinstance(result["connectedNodes"], list) - for item in result["connectedNodes"]: - assert isinstance(item, dict) - assert isinstance(item["node1"], str) - assert isinstance(item["node2"], str) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{CONNECTED_NODE_PAIR_ARRAY_URL}not_a_url&relationshipType=requiresDependency" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetConnectedNodePairPage(BaseTestCase): - """Tests for connected node pair page endpoint""" - - def test_success(self) -> 
None: - """Test for successful result""" - url = f"{CONNECTED_NODE_PAIR_PAGE_URL}{TEST_SCHEMA_URL}&relationshipType=requiresDependency" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == sorted(PAGING_KEYS + ["connectedNodes"]) - assert result["number"] == 1 - assert result["size"] == 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["connectedNodes"], list) - for item in result["connectedNodes"]: - assert isinstance(item, dict) - assert isinstance(item["node1"], str) - assert isinstance(item["node2"], str) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{CONNECTED_NODE_PAIR_PAGE_URL}not_a_url&relationshipType=requiresDependency" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetNodeIsRequired(BaseTestCase): - """Test case for node is required endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{NODE_IS_REQUIRED_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert response.json - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{NODE_IS_REQUIRED_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetPropertyLabel(BaseTestCase): - """Test case for property label endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{PROPERTY_LABEL_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert response.json == "nodeLabel" - - -class TestGetSchemaAttributes(BaseTestCase): - """Test case for schema attributes endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{SCHEMA_ATTRIBUTES_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - - def test_404(self) -> None: - """Test for 404 result""" - url = f"{SCHEMA_ATTRIBUTES_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert404(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetNodeProperties(BaseTestCase): - """Test case for node attributes endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{NODE_PROPERTIES_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["node_properties"] - for item in result["node_properties"]: - assert isinstance(item, str) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{NODE_PROPERTIES_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - 
self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetNodeValidationRules(BaseTestCase): - """Test case for node validation rules endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{NODE_VALIDATION_RULES_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["validation_rules"] - assert isinstance(result["validation_rules"], list) - for item in result["validation_rules"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["name"] - assert isinstance(item["name"], str) - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{NODE_VALIDATION_RULES_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetNodeDependencyArray(BaseTestCase): - """Test case for node depencencies endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{NODE_DEPENDENCY_ARRAY_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["nodes"] - assert isinstance(result["nodes"], list) - for item in result["nodes"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["name"] - assert isinstance(item["name"], str) - - def test_return_display_names(self) -> None: - """Test for returnDisplayNames parameter""" - url = f"{NODE_DEPENDENCY_ARRAY_URL}{TEST_SCHEMA_URL}&returnDisplayNames=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - url = f"{NODE_DEPENDENCY_ARRAY_URL}{TEST_SCHEMA_URL}&returnDisplayNames=false" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - def test_return_ordered_by_schema(self) -> None: - """Test for returnOrderedBySchema parameter""" - url = f"{NODE_DEPENDENCY_ARRAY_URL}{TEST_SCHEMA_URL}&returnOrderedBySchema=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - url = ( - f"{NODE_DEPENDENCY_ARRAY_URL}{TEST_SCHEMA_URL}&returnOrderedBySchema=false" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{NODE_DEPENDENCY_ARRAY_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -class TestGetNodeDependencyPage(BaseTestCase): - """Test case for node depencencies endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - url = f"{NODE_DEPENDENCY_PAGE_URL}{TEST_SCHEMA_URL}" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == sorted(PAGING_KEYS + ["nodes"]) - assert result["number"] == 1 - assert result["size"] 
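The schema endpoint tests here build query strings by concatenating the raw schema URL and extra parameters into f-strings, which is fine for these fixed fixtures; for anything user-supplied, `urllib.parse.urlencode` is the safer general pattern because it percent-encodes reserved characters. A small sketch reusing the path and parameter names from these tests (the helper itself is not part of the deleted code):

    from urllib.parse import urlencode

    TEST_SCHEMA_URL = (
        "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/main/"
        "tests/data/example.model.jsonld"
    )

    def dependency_array_url(return_display_names: bool) -> str:
        """Build the dependencyArray URL with properly escaped query parameters."""
        query = urlencode(
            {
                "schemaUrl": TEST_SCHEMA_URL,
                "returnDisplayNames": str(return_display_names).lower(),
            }
        )
        return f"/api/v1/nodes/Patient/dependencyArray?{query}"

    # Produces the same endpoint path, with the schema URL percent-encoded
    # and returnDisplayNames=true appended.
    print(dependency_array_url(True))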
== 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["nodes"], list) - for item in result["nodes"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["name"] - assert isinstance(item["name"], str) - - def test_return_display_names(self) -> None: - """Test for returnDisplayNames parameter""" - url = f"{NODE_DEPENDENCY_PAGE_URL}{TEST_SCHEMA_URL}&returnDisplayNames=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - url = f"{NODE_DEPENDENCY_PAGE_URL}{TEST_SCHEMA_URL}&returnDisplayNames=false" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - def test_return_ordered_by_schema(self) -> None: - """Test for returnOrderedBySchema parameter""" - url = f"{NODE_DEPENDENCY_PAGE_URL}{TEST_SCHEMA_URL}&returnOrderedBySchema=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - url = f"{NODE_DEPENDENCY_PAGE_URL}{TEST_SCHEMA_URL}&returnOrderedBySchema=false" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - def test_500(self) -> None: - """Test for 500 result""" - url = f"{NODE_DEPENDENCY_PAGE_URL}not_a_url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -if __name__ == "__main__": - unittest.main() diff --git a/apps/schematic/api/schematic_api/test/test_schema_controller_impl.py b/apps/schematic/api/schematic_api/test/test_schema_controller_impl.py deleted file mode 100644 index 757f4deb3..000000000 --- a/apps/schematic/api/schematic_api/test/test_schema_controller_impl.py +++ /dev/null @@ -1,325 +0,0 @@ -"""Tests for schema endpoint functions""" - -# pylint: disable=duplicate-code - - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.component_requirement_subgraph import ( - ComponentRequirementSubgraph, -) -from schematic_api.models.node_property_array import NodePropertyArray -from schematic_api.models.validation_rule import ValidationRule -from schematic_api.models.validation_rule_array import ValidationRuleArray -from schematic_api.models.node import Node -from schematic_api.models.node_array import NodeArray -from schematic_api.models.node_page import NodePage -from schematic_api.models.connected_node_pair_page import ConnectedNodePairPage -from schematic_api.models.connected_node_pair_array import ConnectedNodePairArray -from schematic_api.models.connected_node_pair import ConnectedNodePair -from schematic_api.controllers.schema_controller_impl import ( - get_component, - get_component_requirements_array, - get_component_requirements_graph, - get_connected_node_pair_page, - get_connected_node_pair_array, - get_node_is_required, - get_property_label, - get_schema_attributes, - get_node_properties, - get_node_validation_rules, - get_node_dependency_array, - get_node_dependency_page, -) - - -class TestGetComponent: - """Tests get_component""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_component( - component_label="Patient", 
schema_url=test_schema_url - ) - assert status == 200 - assert isinstance(result, str) - - def test_internal_error(self, test_schema_url: str) -> None: - """Test for 500 result""" - result, status = get_component( - component_label="not_a_component", schema_url=test_schema_url - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetComponentRequirementsArray: - """Tests get_component_requirements_array""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_component_requirements_array( - component_label="Biospecimen", - schema_url=test_schema_url, - display_label_type="class_label", - ) - assert status == 200 - assert isinstance(result, list) - for req in result: - assert isinstance(req, str) - - def test_internal_error(self, test_schema_url: str) -> None: - """Test for 500 result""" - result, status = get_component_requirements_array( - component_label="not_a_component", - schema_url=test_schema_url, - display_label_type="class_label", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetComponentRequirementsGraph: - """Tests get_component_requirements_graph""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_component_requirements_graph( - component_label="Biospecimen", - schema_url=test_schema_url, - display_label_type="class_label", - ) - assert status == 200 - assert isinstance(result, list) - for subgraph in result: - assert isinstance(subgraph, ComponentRequirementSubgraph) - assert isinstance(subgraph.component1, str) - assert isinstance(subgraph.component2, str) - - def test_internal_error(self, test_schema_url: str) -> None: - """Test for 500 result""" - result, status = get_component_requirements_graph( - component_label="not_a_component", - schema_url=test_schema_url, - display_label_type="class_label", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetConnectedNodePairArray: - """Tests get_connected_node_pair_array""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_connected_node_pair_array( - schema_url=test_schema_url, - relationship_type="requiresDependency", - ) - assert status == 200 - assert isinstance(result, ConnectedNodePairArray) - assert isinstance(result.connected_nodes, list) - for item in result.connected_nodes: - assert isinstance(item, ConnectedNodePair) - assert isinstance(item.node1, str) - assert isinstance(item.node2, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_connected_node_pair_array( - schema_url="not_a_url", - relationship_type="requiresDependency", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetConnectedNodePairPage: - """Tests get_connected_node_pair_page""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_connected_node_pair_page( - schema_url=test_schema_url, - relationship_type="requiresDependency", - ) - assert status == 200 - assert isinstance(result, ConnectedNodePairPage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.connected_nodes, list) - for item in result.connected_nodes: - 
assert isinstance(item, ConnectedNodePair) - assert isinstance(item.node1, str) - assert isinstance(item.node2, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_connected_node_pair_page( - schema_url="not_a_url", - relationship_type="requiresDependency", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetNodeIsRequired: - """Test case for get_node_is_required""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_node_is_required( - node_display="FamilyHistory", - schema_url=test_schema_url, - ) - assert status == 200 - assert result - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_node_is_required( - node_display="name", - schema_url="not_a_url", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -def test_get_property_label() -> None: - """Test for get_property_label""" - result, status = get_property_label( - node_display="display_name", - use_strict_camel_case=True, - ) - assert status == 200 - assert result == "displayName" - - -class TestGetSchemaAttributes: - """Tests get_schema_attributes""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_schema_attributes(schema_url=test_schema_url) - assert status == 200 - assert isinstance(result, str) - - def test_404_error( - self, - ) -> None: - """Test for 404 result""" - result, status = get_schema_attributes(schema_url="not_a_url") - assert status == 404 - assert isinstance(result, BasicError) - - -class TestGetNodeProperties: - """Test case for get_node_properties""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_node_properties( - node_label="Patient", - schema_url=test_schema_url, - ) - assert status == 200 - assert isinstance(result, NodePropertyArray) - assert isinstance(result.node_properties, list) - for item in result.node_properties: - assert isinstance(item, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_node_properties( - node_label="Patient", - schema_url="not_a_url", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetNodeValidationRuleArray: - """Test case for get_node_validation_rules""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_node_validation_rules( - node_display="CheckRegexList", - schema_url=test_schema_url, - ) - assert status == 200 - assert isinstance(result, ValidationRuleArray) - assert isinstance(result.validation_rules, list) - for item in result.validation_rules: - assert isinstance(item, ValidationRule) - assert isinstance(item.name, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_node_validation_rules( - node_display="CheckRegexList", - schema_url="not_a_url", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetNodeDependencyArray: - """Test case for get_node_dependency_array""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_node_dependency_array( - schema_url=test_schema_url, - node_label="Patient", - ) - assert status == 200 - assert isinstance(result, NodeArray) - assert isinstance(result.nodes, list) - for item in result.nodes: - assert isinstance(item, 
Node) - assert isinstance(item.name, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_node_dependency_page( - schema_url="not_a_url", - node_label="Patient", - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetNodeDependencyPage: - """Test case for get_node_dependency_page""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_node_dependency_page( - schema_url=test_schema_url, - node_label="Patient", - ) - assert status == 200 - assert isinstance(result, NodePage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.nodes, list) - for item in result.nodes: - assert isinstance(item, Node) - assert isinstance(item.name, str) - - def test_internal_error(self) -> None: - """Test for 500 result""" - result, status = get_node_dependency_page( - schema_url="not_a_url", - node_label="Patient", - ) - assert status == 500 - assert isinstance(result, BasicError) diff --git a/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints.py b/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints.py deleted file mode 100644 index 558be240b..000000000 --- a/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints.py +++ /dev/null @@ -1,661 +0,0 @@ -"""Tests for endpoints""" - -# pylint: disable=duplicate-code - -import unittest -from unittest.mock import patch -import pandas as pd - -from synapseclient.core.exceptions import SynapseNoCredentialsError # type: ignore -from schematic.exceptions import AccessCredentialsError # type: ignore - -import schematic_api.controllers.storage_controller_impl -from schematic_api.test import BaseTestCase -from schematic_api.models.file_metadata import FileMetadata - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - -ASSET_VIEW_CSV_URL = "/api/v1/assetTypes/synapse/assetViews/syn1/csv" -ASSET_VIEW_JSON_URL = "/api/v1/assetTypes/synapse/assetViews/syn1/json" -DATASET_FILE_METADATA_ARRAY_URL = ( - "/api/v1/assetTypes/synapse/datasets/syn2/fileMetadataArray?assetViewId=syn1" -) -DATASET_FILE_METADATA_PAGE_URL = ( - "/api/v1/assetTypes/synapse/datasets/syn2/fileMetadataPage?assetViewId=syn1" -) -DATASET_MANIFEST_CSV_URL = ( - "/api/v1/assetTypes/synapse/datasets/syn2/manifestCsv?assetViewId=syn1" -) -DATASET_MANIFEST_JSON_URL = ( - "/api/v1/assetTypes/synapse/datasets/syn2/manifestJson?assetViewId=syn1" -) -MANIFEST_CSV_URL = "/api/v1/assetTypes/synapse/manifests/syn1/csv" -MANIFEST_JSON_URL = "/api/v1/assetTypes/synapse/manifests/syn1/json" - - -class TestGetAssetViewCsv(BaseTestCase): - """Test case for asset view json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open( - ASSET_VIEW_CSV_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, str) - assert result.endswith("asset_view.csv") - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - 
schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - ASSET_VIEW_CSV_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - ASSET_VIEW_CSV_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - ASSET_VIEW_CSV_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetAssetViewJson(BaseTestCase): - """Test case for asset view json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open( - ASSET_VIEW_JSON_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - assert response.json == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - ASSET_VIEW_JSON_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - ASSET_VIEW_JSON_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - ASSET_VIEW_JSON_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetDatasetFileMetadataArray(BaseTestCase): - """Test case for files endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ): - response = self.client.open( - DATASET_FILE_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert isinstance(result["files"], list) - for item in result["files"]: - assert isinstance(item, 
dict) - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - - def test_file_names(self) -> None: - """Test with file_names parameter""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ) as mock_function: - response = self.client.open( - DATASET_FILE_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, False) - - url = f"{DATASET_FILE_METADATA_ARRAY_URL}&fileNames=file.text" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", ["file.text"], False) - - url = f"{DATASET_FILE_METADATA_ARRAY_URL}&fileNames=file.text&fileNames=file2.text" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with( - "syn2", "synapse", ["file.text", "file2.text"], False - ) - - def test_use_full_file_path(self) -> None: - """Test with use_full_file_path parameter""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ) as mock_function: - url = f"{DATASET_FILE_METADATA_ARRAY_URL}&useFullFilePath=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, True) - - url = f"{DATASET_FILE_METADATA_ARRAY_URL}&useFullFilePath=false" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, False) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - DATASET_FILE_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - DATASET_FILE_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - DATASET_FILE_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetDatasetFileMetadataPage(BaseTestCase): - """Test case for files endpoint""" - - def test_success(self) -> None: - """Test for successful 
result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ): - response = self.client.open( - DATASET_FILE_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert result["number"] == 1 - assert result["size"] == 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["files"], list) - for item in result["files"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["id", "name"] - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - - def test_file_names(self) -> None: - """Test with file_names parameter""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ) as mock_function: - response = self.client.open( - DATASET_FILE_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, False) - - url = f"{DATASET_FILE_METADATA_PAGE_URL}&fileNames=file.text" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", ["file.text"], False) - - url = f"{DATASET_FILE_METADATA_PAGE_URL}&fileNames=file.text&fileNames=file2.text" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with( - "syn2", "synapse", ["file.text", "file2.text"], False - ) - - def test_use_full_file_path(self) -> None: - """Test with use_full_file_path parameter""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ) as mock_function: - url = f"{DATASET_FILE_METADATA_PAGE_URL}&useFullFilePath=true" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, True) - - url = f"{DATASET_FILE_METADATA_PAGE_URL}&useFullFilePath=false" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_with("syn2", "synapse", None, False) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - DATASET_FILE_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - 
"get_dataset_file_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - DATASET_FILE_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - DATASET_FILE_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetDatasetManifestCSV(BaseTestCase): - """Test case for manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open( - DATASET_MANIFEST_CSV_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, str) - assert result.endswith("manifest.csv") - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - DATASET_MANIFEST_CSV_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - DATASET_MANIFEST_CSV_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - DATASET_MANIFEST_CSV_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetDatasetManifestJson(BaseTestCase): - """Test case for manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open( - DATASET_MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - assert response.json == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - DATASET_MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> 
None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - DATASET_MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - DATASET_MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetManifestCSV(BaseTestCase): - """Test case for manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open(MANIFEST_CSV_URL, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, str) - assert result.endswith("manifest.csv") - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open(MANIFEST_CSV_URL, method="GET", headers=HEADERS) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open(MANIFEST_CSV_URL, method="GET", headers=HEADERS) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=TypeError, - ): - response = self.client.open(MANIFEST_CSV_URL, method="GET", headers=HEADERS) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetManifestJson(BaseTestCase): - """Test case for manifest json endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - response = self.client.open( - MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - assert response.json == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with 
patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - MANIFEST_JSON_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints2.py b/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints2.py deleted file mode 100644 index e22852bf5..000000000 --- a/apps/schematic/api/schematic_api/test/test_storage_controller_endpoints2.py +++ /dev/null @@ -1,493 +0,0 @@ -"""Tests for endpoints""" - -# pylint: disable=duplicate-code - -import unittest -from unittest.mock import patch - -from synapseclient.core.exceptions import SynapseNoCredentialsError # type: ignore -from schematic.exceptions import AccessCredentialsError # type: ignore - -import schematic_api.controllers.storage_controller_impl -from schematic_api.test import BaseTestCase -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api.models.project_metadata import ProjectMetadata -from .conftest import EXAMPLE_MANIFEST_METADATA, MANIFEST_METADATA_KEYS, PAGING_KEYS - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - -PROJECT_METADATA_ARRAY_URL = ( - "/api/v1/assetTypes/synapse/assetViews/syn1/projectMetadataArray" -) -PROJECT_METADATA_PAGE_URL = ( - "/api/v1/assetTypes/synapse/assetViews/syn1/projectMetadataPage" -) -PROJECT_DATASET_METATDATA_ARRRAY_URL = ( - "/api/v1/assetTypes/synapse/projects/syn2/datasetMetadataArray?assetViewId=syn1" -) -PROJECT_DATASET_METATDATA_PAGE_URL = ( - "/api/v1/assetTypes/synapse/projects/syn2/datasetMetadataPage?assetViewId=syn1" -) -PROJECT_MANIFEST_METADATA_ARRAY_URL = ( - "/api/v1/assetTypes/synapse/projects/syn2/manifestMetadataArray?assetViewId=syn1" -) -PROJECT_MANIFEST_METADATA_PAGE_URL = ( - "/api/v1/assetTypes/synapse/projects/syn2/manifestMetadataPage?assetViewId=syn1" -) - - -class TestGetProjectMetadataArray(BaseTestCase): - """Test case for projects endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - return_value=[ - ProjectMetadata("syn1", "name1"), - ProjectMetadata("syn2", "name2"), - ], - ): - response = self.client.open( - PROJECT_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["projects"] - assert isinstance(result["projects"], list) - for item in result["projects"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["id", "name"] - assert isinstance(item["name"], str) - assert isinstance(item["id"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - 
side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetProjectMetadataPage(BaseTestCase): - """Test case for projects endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - return_value=[ - ProjectMetadata("syn1", "name1"), - ProjectMetadata("syn2", "name2"), - ], - ): - response = self.client.open( - PROJECT_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == sorted(PAGING_KEYS + ["projects"]) - assert result["number"] == 1 - assert result["size"] == 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["projects"], list) - for item in result["projects"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["id", "name"] - assert isinstance(item["name"], str) - assert isinstance(item["id"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetProjectDatasetMetadataArray(BaseTestCase): - """Test case for dataset metadat endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - with 
patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - return_value=[ - DatasetMetadata("syn1", "name1"), - DatasetMetadata("syn2", "name2"), - ], - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_ARRRAY_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert isinstance(result["datasets"], list) - for item in result["datasets"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["id", "name"] - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_ARRRAY_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_ARRRAY_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_ARRRAY_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetProjectDatasetMetadataPage(BaseTestCase): - """Test case for dataset metadat endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - return_value=[ - DatasetMetadata("syn1", "name1"), - DatasetMetadata("syn2", "name2"), - ], - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert result["number"] == 1 - assert result["size"] == 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["datasets"], list) - for item in result["datasets"]: - assert isinstance(item, dict) - assert list(item.keys()) == ["id", "name"] - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with 
patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_DATASET_METATDATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetProjectManifestMetadataArray(BaseTestCase): - """Test case for manifests endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - return_value=EXAMPLE_MANIFEST_METADATA, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = response.json - assert isinstance(result, dict) - assert isinstance(result["manifests"], list) - for item in result["manifests"]: - assert isinstance(item, dict) - assert list(item.keys()) == MANIFEST_METADATA_KEYS - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - assert isinstance(item["datasetName"], str) - assert isinstance(item["datasetId"], str) - assert isinstance(item["componentName"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_ARRAY_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetProjectManifestMetadataPage(BaseTestCase): - """Test case for manifests endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - return_value=EXAMPLE_MANIFEST_METADATA, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - result = 
response.json - assert isinstance(result, dict) - assert result["number"] == 1 - assert result["size"] == 100000 - assert not result["hasNext"] - assert not result["hasPrevious"] - assert result["totalPages"] == 1 - assert isinstance(result["totalElements"], int) - assert isinstance(result["manifests"], list) - for item in result["manifests"]: - assert isinstance(item, dict) - assert list(item.keys()) == MANIFEST_METADATA_KEYS - assert isinstance(item["id"], str) - assert isinstance(item["name"], str) - assert isinstance(item["datasetName"], str) - assert isinstance(item["datasetId"], str) - assert isinstance(item["componentName"], str) - - def test_401(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert401( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_403(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert403( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=TypeError, - ): - response = self.client.open( - PROJECT_MANIFEST_METADATA_PAGE_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/apps/schematic/api/schematic_api/test/test_storage_controller_impl.py b/apps/schematic/api/schematic_api/test/test_storage_controller_impl.py deleted file mode 100644 index e03ff4aa5..000000000 --- a/apps/schematic/api/schematic_api/test/test_storage_controller_impl.py +++ /dev/null @@ -1,960 +0,0 @@ -"""Tests for storage endpoint functions""" - -# pylint: disable=duplicate-code - -from unittest.mock import patch - -import pandas as pd -from synapseclient.core.exceptions import ( # type: ignore - SynapseNoCredentialsError, - SynapseAuthenticationError, -) -from schematic.exceptions import AccessCredentialsError # type: ignore - -from schematic_api.models.basic_error import BasicError -from schematic_api.models.manifest_metadata import ManifestMetadata -from schematic_api.models.manifest_metadata_array import ManifestMetadataArray -from schematic_api.models.manifest_metadata_page import ManifestMetadataPage -from schematic_api.models.dataset_metadata import DatasetMetadata -from schematic_api.models.dataset_metadata_array import DatasetMetadataArray -from schematic_api.models.dataset_metadata_page import DatasetMetadataPage -from schematic_api.models.project_metadata import ProjectMetadata -from schematic_api.models.project_metadata_array import ProjectMetadataArray -from schematic_api.models.project_metadata_page import ProjectMetadataPage -from schematic_api.models.file_metadata import FileMetadata -from schematic_api.models.file_metadata_page import FileMetadataPage -from schematic_api.models.file_metadata_array import FileMetadataArray -import 
schematic_api.controllers.storage_controller_impl -from schematic_api.controllers.storage_controller_impl import ( - get_dataset_manifest_csv, - get_dataset_manifest_json, - get_manifest_csv, - get_manifest_json, - get_asset_view_csv, - get_asset_view_json, - get_dataset_file_metadata_array, - get_dataset_file_metadata_page, - get_project_metadata_array, - get_project_metadata_page, - get_project_dataset_metadata_array, - get_project_dataset_metadata_page, - get_project_manifest_metadata_array, - get_project_manifest_metadata_page, -) - - -class TestGetAssetViewCsv: - """Test case for get_asset_view_csv""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_asset_view_csv( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 200 - assert result.endswith("asset_view.csv") - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=TypeError, - ): - result, status = get_asset_view_csv( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetAssetViewJson: - """Test case for get_asset_view_json""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_asset_view_json( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 200 - assert result == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_asset_view_json( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_asset_view_json( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_asset_view_json( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_asset_view_from_schematic", - side_effect=TypeError, - ): - result, status = get_asset_view_json( - asset_type="synapse", asset_view_id="syn1" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetDatasetFileMetadataArray: - """Test case for get_dataset_file_metadata_array""" - - def test_success(self) -> None: - """Test for successful 
result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ): - result, status = get_dataset_file_metadata_array( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, FileMetadataArray) - assert isinstance(result.files, list) - for item in result.files: - assert isinstance(item, FileMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_dataset_file_metadata_array( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_dataset_file_metadata_array( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_dataset_file_metadata_array( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_dataset_file_metadata_array( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetDatasetFileMetadataPage: - """Test case for get_dataset_file_metadata_page""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - return_value=[FileMetadata("syn1", "name1"), FileMetadata("syn2", "name2")], - ): - result, status = get_dataset_file_metadata_page( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, FileMetadataPage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.files, list) - for item in result.files: - assert isinstance(item, FileMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_dataset_file_metadata_page( - 
dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_dataset_file_metadata_page( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_dataset_file_metadata_page( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_file_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_dataset_file_metadata_page( - dataset_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetDatasetManifestCsv: - """Test case for get_dataset_manifest_csv""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_dataset_manifest_csv( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 200 - assert result.endswith("manifest.csv") - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=TypeError, - ): - result, status = get_dataset_manifest_csv( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetDatasetManifestJson: - """Test case for get_dataset_manifest_json""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_dataset_manifest_json( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 200 - assert result == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_dataset_manifest_json( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_dataset_manifest_json( - 
asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_dataset_manifest_json( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_dataset_manifest_from_schematic", - side_effect=TypeError, - ): - result, status = get_dataset_manifest_json( - asset_type="synapse", dataset_id="syn1", asset_view_id="syn2" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetManifestCsv: - """Test case for get_manifest_csv""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_manifest_csv(asset_type="synapse", manifest_id="syn1") - assert status == 200 - assert result.endswith("manifest.csv") - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=TypeError, - ): - result, status = get_manifest_csv(asset_type="synapse", manifest_id="syn1") - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetManifestJson: - """Test case for get_manifest_json""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - return_value=pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}), - ): - result, status = get_manifest_json(asset_type="synapse", manifest_id="syn1") - assert status == 200 - assert result == '{"col1":{"0":1,"1":2},"col2":{"0":3,"1":4}}' - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_manifest_json(asset_type="synapse", manifest_id="syn1") - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_manifest_json(asset_type="synapse", manifest_id="syn1") - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_manifest_json(asset_type="synapse", manifest_id="syn1") - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_manifest_from_schematic", - 
side_effect=TypeError, - ): - result, status = get_manifest_json(asset_type="synapse", manifest_id="syn1") - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectMetadataArray: - """Test case for get_project_metadata_array""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - return_value=[ - ProjectMetadata("syn1", "name1"), - ProjectMetadata("syn2", "name2"), - ], - ): - result, status = get_project_metadata_array( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, ProjectMetadataArray) - assert isinstance(result.projects, list) - for item in result.projects: - assert isinstance(item, ProjectMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_metadata_array( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_metadata_array( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_metadata_array( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_metadata_array( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectMetadataPage: - """Test case for get_project_metadata_page""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - return_value=[ - ProjectMetadata("syn1", "name1"), - ProjectMetadata("syn2", "name2"), - ], - ): - result, status = get_project_metadata_page( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, ProjectMetadataPage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.projects, list) - for item in result.projects: - assert isinstance(item, ProjectMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - 
schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_metadata_page( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_metadata_page( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_metadata_page( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_metadata_page( - asset_view_id="syn1", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectDatasetMetadataArray: - """Test case for get_project_dataset_metadata_array""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - return_value=[ - DatasetMetadata("syn1", "name1"), - DatasetMetadata("syn2", "name2"), - ], - ): - result, status = get_project_dataset_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, DatasetMetadataArray) - assert isinstance(result.datasets, list) - for item in result.datasets: - assert isinstance(item, DatasetMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_dataset_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_dataset_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_dataset_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - 
def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_dataset_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectDatasetMetadataPage: - """Test case for get_project_dataset_metadata_page""" - - def test_success(self) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - return_value=[ - DatasetMetadata("syn1", "name1"), - DatasetMetadata("syn2", "name2"), - ], - ): - result, status = get_project_dataset_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, DatasetMetadataPage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.datasets, list) - for item in result.datasets: - assert isinstance(item, DatasetMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_dataset_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_dataset_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_dataset_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_dataset_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_dataset_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectManifestMetadataArray: - """Test case for get_project_manifest_metadata_array""" - - def test_success(self, example_manifest_metadata: list[ManifestMetadata]) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - return_value=example_manifest_metadata, - ): - result, status = 
get_project_manifest_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, ManifestMetadataArray) - assert isinstance(result.manifests, list) - for item in result.manifests: - assert isinstance(item, ManifestMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - assert isinstance(item.dataset_name, str) - assert isinstance(item.dataset_id, str) - assert isinstance(item.component_name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_manifest_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_manifest_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_manifest_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_manifest_metadata_array( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) - - -class TestGetProjectManifestMetadataPage: - """Test case for get_project_manifest_metadata_page""" - - def test_success(self, example_manifest_metadata: list[ManifestMetadata]) -> None: - """Test for successful result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - return_value=example_manifest_metadata, - ): - result, status = get_project_manifest_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 200 - assert isinstance(result, ManifestMetadataPage) - assert result.number == 1 - assert result.size == 100000 - assert isinstance(result.total_elements, int) - assert isinstance(result.total_pages, int) - assert isinstance(result.has_next, bool) - assert isinstance(result.has_previous, bool) - assert isinstance(result.manifests, list) - for item in result.manifests: - assert isinstance(item, ManifestMetadata) - assert isinstance(item.id, str) - assert isinstance(item.name, str) - assert isinstance(item.dataset_name, str) - assert isinstance(item.dataset_id, str) - assert isinstance(item.component_name, str) - - def test_no_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - 
"get_project_manifest_metadata_from_schematic", - side_effect=SynapseNoCredentialsError, - ): - result, status = get_project_manifest_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_bad_credentials_error(self) -> None: - """Test for 401 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=SynapseAuthenticationError, - ): - result, status = get_project_manifest_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 401 - assert isinstance(result, BasicError) - - def test_no_access_error(self) -> None: - """Test for 403 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=AccessCredentialsError("project"), - ): - result, status = get_project_manifest_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 403 - assert isinstance(result, BasicError) - - def test_internal_error(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.storage_controller_impl, - "get_project_manifest_metadata_from_schematic", - side_effect=TypeError, - ): - result, status = get_project_manifest_metadata_page( - project_id="syn1", asset_view_id="syn2", asset_type="synapse" - ) - assert status == 500 - assert isinstance(result, BasicError) diff --git a/apps/schematic/api/schematic_api/test/test_synapse_endpoints.py b/apps/schematic/api/schematic_api/test/test_synapse_endpoints.py deleted file mode 100644 index aed2d8871..000000000 --- a/apps/schematic/api/schematic_api/test/test_synapse_endpoints.py +++ /dev/null @@ -1,385 +0,0 @@ -"""Tests for endpoints that use Synapse without mocking the Synapse client""" - -import json -import os -from unittest import mock -import shutil -from typing import Generator - -import pytest -import yaml -import pandas as pd - -from schematic.store import SynapseStorage # type: ignore - -from schematic_api.controllers.utils import ( - purge_synapse_cache, - check_synapse_cache_size, -) -from schematic_api.test import BaseTestCase -from .conftest import ( - MANIFEST_METADATA_KEYS, - TEST_SCHEMA_URL, - CORRECT_MANIFEST_PATH, - csv_to_bytes, - csv_to_json_str, -) - - -SECRETS_FILE = "schematic_api/test/data/synapse_config.yaml" -EXAMPLE_SECRETS_FILE = "schematic_api/test/data/synapse_config_example.yaml" - -if os.path.exists(SECRETS_FILE): - with open(SECRETS_FILE, "r", encoding="utf-8") as file: - secrets = yaml.safe_load(file) -else: - with open(EXAMPLE_SECRETS_FILE, "r", encoding="utf-8") as file: - secrets = yaml.safe_load(file) - -SYNAPSE_TOKEN = secrets["synapse_token"] -TEST_DATASET = secrets["test_dataset"] -TEST_MANIFEST = secrets["test_manifest"] -TEST_ASSET_VIEW = secrets["test_asset_view"] -TEST_PROJECT = secrets["test_project"] - -HEADERS = { - "Accept": "application/json", - "Authorization": f"Bearer {SYNAPSE_TOKEN}", -} - - -@pytest.fixture(scope="session", name="synapse_store") -def fixture_synapse_store() -> Generator[SynapseStorage, None, None]: - """ - Yields A synapse storage object, and deletes the cache at the end of the session - """ - synapse_store = SynapseStorage( - access_token=SYNAPSE_TOKEN, synapse_cache_path="test_cache_path" - ) - yield synapse_store - shutil.rmtree("test_cache_path") - - -@pytest.mark.synapse -@pytest.mark.secrets 
-class TestGenerateExcelManifest(BaseTestCase): - """Tests excel manifest endpoint""" - - def test_500(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/generateExcelManifest?schemaUrl=xxx" - "&assetViewId=syn28559058" - "&dataType=Patient" - "&datasetId=syn51730545" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500(response, f"Response body is : {response.data.decode('utf-8')}") - - -@pytest.mark.synapse -@pytest.mark.secrets -class TestGenerateGoogleSheetManifests(BaseTestCase): - """Tests google sheet manifest endpoint""" - - # local environment has variable 'SECRETS_MANAGER_SECRETS that causes an error when creating - # google credentials - @mock.patch.dict(os.environ, {}, clear=True) - def test_success1(self) -> None: - """Test for successful result""" - url = ( - f"/api/v1/generateGoogleSheetManifests?schemaUrl={TEST_SCHEMA_URL}" - "&assetViewId=syn28559058" - "&dataTypeArray=Patient" - "&dataTypeArray=Biospecimen" - "&datasetIdArray=syn51730545" - "&datasetIdArray=syn51730547" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["links"] - links = result["links"] - assert isinstance(links, list) - assert len(links) == 2 - - # local environment has variable 'SECRETS_MANAGER_SECRETS that causes an error when creating - # google credentials - @mock.patch.dict(os.environ, {}, clear=True) - def test_success2(self) -> None: - """Test for successful result""" - url = ( - f"/api/v1/generateGoogleSheetManifests?schemaUrl={TEST_SCHEMA_URL}" - "&assetViewId=syn28559058" - "&generateAllManifests=true" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - result = response.json - assert isinstance(result, dict) - assert list(result.keys()) == ["links"] - links = result["links"] - assert isinstance(links, list) - assert len(links) == 3 - - -@pytest.mark.synapse -@pytest.mark.secrets -class TestValidationEndpoints(BaseTestCase): - """Integration tests""" - - def test_submit_manifest_csv(self) -> None: - """Test for successful result""" - url = ( - f"/api/v1/submitManifestCsv?schemaUrl={TEST_SCHEMA_URL}" - "&component=Biospecimen" - f"&datasetId={TEST_DATASET}" - f"&assetViewId={TEST_ASSET_VIEW}" - "&storageMethod=file_only" - ) - body = csv_to_bytes(CORRECT_MANIFEST_PATH) - response = self.client.open(url, method="POST", headers=HEADERS, data=body) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - - def test_submit_manifest_json(self) -> None: - """Test for successful result""" - url = ( - f"/api/v1/submitManifestJson?schemaUrl={TEST_SCHEMA_URL}" - "&component=Biospecimen" - f"&datasetId={TEST_DATASET}" - f"&assetViewId={TEST_ASSET_VIEW}" - "&storageMethod=file_only" - ) - body = csv_to_json_str(CORRECT_MANIFEST_PATH) - response = self.client.open(url, method="POST", headers=HEADERS, data=body) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - - -@pytest.mark.synapse -@pytest.mark.secrets -class TestStorageEndpoints(BaseTestCase): - """Integration tests""" - - def test_get_asset_view_csv(self) -> None: - """Test for successful result""" - url = "/api/v1/assetTypes/synapse/" f"assetViews/{TEST_ASSET_VIEW}/csv" 
- response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - path = json.loads(response.data) - assert isinstance(path, str) - assert path.endswith(".asset_view.csv") - asset_view = pd.read_csv(path) - assert isinstance(asset_view, pd.DataFrame) - - def test_get_asset_view_json(self) -> None: - """Test for successful result""" - url = "/api/v1/assetTypes/synapse/" f"assetViews/{TEST_ASSET_VIEW}/json" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - response_dict = json.loads(response.json) - assert isinstance(response_dict, dict) - dataframe = pd.DataFrame.from_dict(response_dict) - assert isinstance(dataframe, pd.DataFrame) - - def test_get_dataset_file_metadata_array(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"datasets/{TEST_DATASET}/fileMetadataArray" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "files" in response.json - assert isinstance(response.json["files"], list) - for item in response.json["files"]: - assert isinstance(item, dict) - for key in ["id", "name"]: - assert key in item - - def test_get_dataset_file_metadata_page(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"datasets/{TEST_DATASET}/fileMetadataPage" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "files" in response.json - assert isinstance(response.json["files"], list) - for item in response.json["files"]: - assert isinstance(item, dict) - for key in ["id", "name"]: - assert key in item - - def test_get_dataset_manifest_csv(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"datasets/{TEST_DATASET}/manifestCsv" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - path = json.loads(response.data) - assert isinstance(path, str) - assert path.endswith(".manifest.csv") - asset_view = pd.read_csv(path) - assert isinstance(asset_view, pd.DataFrame) - - def test_get_dataset_manifest_json(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"datasets/{TEST_DATASET}/manifestJson" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - response_dict = json.loads(response.json) - assert isinstance(response_dict, dict) - dataframe = pd.DataFrame.from_dict(response_dict) - assert isinstance(dataframe, pd.DataFrame) - - def test_get_manifest_csv(self) -> None: - """Test for successful result""" - url = "/api/v1/assetTypes/synapse/" f"manifests/{TEST_MANIFEST}/csv" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - path = 
json.loads(response.data) - assert isinstance(path, str) - assert path.endswith(".manifest.csv") - asset_view = pd.read_csv(path) - assert isinstance(asset_view, pd.DataFrame) - - def test_get_manifest_json(self) -> None: - """Test for successful result""" - url = "/api/v1/assetTypes/synapse/" f"manifests/{TEST_MANIFEST}/json" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, str) - response_dict = json.loads(response.json) - assert isinstance(response_dict, dict) - dataframe = pd.DataFrame.from_dict(response_dict) - assert isinstance(dataframe, pd.DataFrame) - - def test_get_project_metadata_array(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"assetViews/{TEST_ASSET_VIEW}/projectMetadataArray" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "projects" in response.json - assert isinstance(response.json["projects"], list) - for project in response.json["projects"]: - assert isinstance(project, dict) - for key in ["id", "name"]: - assert key in project - - def test_get_project_metadata_page(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"assetViews/{TEST_ASSET_VIEW}/projectMetadataPage" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "projects" in response.json - assert isinstance(response.json["projects"], list) - for project in response.json["projects"]: - assert isinstance(project, dict) - for key in ["id", "name"]: - assert key in project - - def test_get_project_dataset_metadata_array(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"projects/{TEST_PROJECT}/datasetMetadataArray" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "datasets" in response.json - assert isinstance(response.json["datasets"], list) - for dataset in response.json["datasets"]: - assert isinstance(dataset, dict) - for key in ["id", "name"]: - assert key in dataset - - def test_get_project_dataset_metadata_page(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"projects/{TEST_PROJECT}/datasetMetadataPage" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "datasets" in response.json - assert isinstance(response.json["datasets"], list) - for dataset in response.json["datasets"]: - assert isinstance(dataset, dict) - for key in ["id", "name"]: - assert key in dataset - - def test_get_project_manifest_metadata_array(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"projects/{TEST_PROJECT}/manifestMetadataArray" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : 
{response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "manifests" in response.json - assert isinstance(response.json["manifests"], list) - for manifest in response.json["manifests"]: - assert isinstance(manifest, dict) - assert list(manifest.keys()) == MANIFEST_METADATA_KEYS - - def test_get_project_manifest_metadata_page(self) -> None: - """Test for successful result""" - url = ( - "/api/v1/assetTypes/synapse/" - f"projects/{TEST_PROJECT}/manifestMetadataPage" - f"?assetViewId={TEST_ASSET_VIEW}" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - assert isinstance(response.json, dict) - assert "manifests" in response.json - assert isinstance(response.json["manifests"], list) - for manifest in response.json["manifests"]: - assert isinstance(manifest, dict) - assert list(manifest.keys()) == MANIFEST_METADATA_KEYS - - -@pytest.mark.synapse -@pytest.mark.secrets -class TestPurgeSynapseCache: # pylint: disable=too-few-public-methods - """Tests purge_synapse_cache""" - - def test_success(self, synapse_store: SynapseStorage) -> None: - """Tests for a successful purge""" - size_before_purge = check_synapse_cache_size(synapse_store.root_synapse_cache) - purge_synapse_cache( - synapse_store, maximum_storage_allowed_cache_gb=0.000001, minute_buffer=0 - ) - size_after_purge = check_synapse_cache_size(synapse_store.root_synapse_cache) - assert size_before_purge > size_after_purge diff --git a/apps/schematic/api/schematic_api/test/test_tangled_tree_endpoints.py b/apps/schematic/api/schematic_api/test/test_tangled_tree_endpoints.py deleted file mode 100644 index 8a2c7aaa6..000000000 --- a/apps/schematic/api/schematic_api/test/test_tangled_tree_endpoints.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Tests for schema endpoints""" - -# pylint: disable=duplicate-code -from __future__ import absolute_import -from unittest.mock import patch - -import schematic_api.controllers.tangled_tree_controller_impl -from schematic_api.test import BaseTestCase - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} - -TANGLED_TREE_LAYERS_URL = "/api/v1/tangledTreeLayers" -TANGLED_TREE_TEXT_URL = "/api/v1/tangledTreeText" - - -class TestGetTangledTreeLayers(BaseTestCase): - """Test case for tangled tree layers endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_layers", - return_value="xxx", - ) as mock_function: - url = f"{TANGLED_TREE_LAYERS_URL}?schemaUrl=url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_once_with("url", "component", "class_label") - assert response.json == "xxx" - - def test_parameters(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_layers", - return_value="xxx", - ) as mock_function: - url = f"{TANGLED_TREE_LAYERS_URL}?schemaUrl=url2&figureType=dependency" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_once_with("url2", "dependency", "class_label") - assert response.json == "xxx" - - def test_500(self) -> None: - """Test for 500 result""" - with 
patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_layers", - side_effect=TypeError, - ): - url = f"{TANGLED_TREE_LAYERS_URL}?schemaUrl=url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - - -class TestGetTangledTreeText(BaseTestCase): - """Test case for tangled tree text endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_text", - return_value="xxx", - ) as mock_function: - url = f"{TANGLED_TREE_TEXT_URL}?schemaUrl=url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_once_with( - "url", "component", "plain", "class_label" - ) - assert response.json == "xxx" - - def test_parameters(self) -> None: - """Test for successful result""" - - with patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_text", - return_value="xxx", - ) as mock_function: - url = ( - f"{TANGLED_TREE_TEXT_URL}" - "?schemaUrl=url2&figureType=dependency&text_format=highligted" - ) - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert200( - response, f"Response body is : {response.data.decode('utf-8')}" - ) - mock_function.assert_called_once_with( - "url2", "dependency", "plain", "class_label" - ) - assert response.json == "xxx" - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - schematic_api.controllers.tangled_tree_controller_impl, - "get_tangled_tree_text", - side_effect=TypeError, - ): - url = f"{TANGLED_TREE_TEXT_URL}?schemaUrl=url" - response = self.client.open(url, method="GET", headers=HEADERS) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) diff --git a/apps/schematic/api/schematic_api/test/test_tangled_tree_impl.py b/apps/schematic/api/schematic_api/test/test_tangled_tree_impl.py deleted file mode 100644 index bc34aeb2b..000000000 --- a/apps/schematic/api/schematic_api/test/test_tangled_tree_impl.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Tests for schema endpoint functions""" - -from schematic_api.models.basic_error import BasicError -from schematic_api.controllers.tangled_tree_controller_impl import ( - get_tangled_tree_layers, - get_tangled_tree_text, -) - - -class TestGetTangledTreeLayers: - """Tests get_tangled_tree_layers""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - result, status = get_tangled_tree_layers( - schema_url=test_schema_url, - figure_type="component", - ) - assert status == 200 - assert isinstance(result, str) - - def test_404_error(self) -> None: - """Test for 404 result""" - result, status = get_tangled_tree_layers( - schema_url="not_a_url", figure_type="component" - ) - assert status == 404 - assert isinstance(result, BasicError) - - -class TestGetTangledTreeText: - """Tests get_tangled_tree_text""" - - def test_success(self, test_schema_url: str) -> None: - """Test for successful result""" - res = get_tangled_tree_text( - schema_url=test_schema_url, figure_type="component", text_format="plain" - ) - (result, status) = res # pylint: disable=unpacking-non-sequence - assert status == 200 - assert isinstance(result, str) - - def test_404_error(self) -> None: - """Test for 404 result""" - res = 
get_tangled_tree_text( - schema_url="not_a_url", figure_type="component", text_format="plain" - ) - (result, status) = res # pylint: disable=unpacking-non-sequence - assert status == 404 - assert isinstance(result, BasicError) diff --git a/apps/schematic/api/schematic_api/test/test_utils.py b/apps/schematic/api/schematic_api/test/test_utils.py deleted file mode 100644 index 6e1a828e0..000000000 --- a/apps/schematic/api/schematic_api/test/test_utils.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Tests for utils""" - -from datetime import datetime - -import pytest - -from synapseclient.core.exceptions import ( # type: ignore - SynapseNoCredentialsError, - SynapseAuthenticationError, -) -from schematic.exceptions import AccessCredentialsError # type: ignore - -from schematic_api.controllers.utils import ( - handle_exceptions, - download_schema_file_as_jsonld, - InvalidSchemaURL, - calculate_datetime, - calculate_byte_size, -) -from schematic_api.models.basic_error import BasicError - - -@handle_exceptions -def func(exc: BaseException, raise_error: bool = True) -> tuple[str, int]: - """used to test decorator""" - if raise_error: - raise exc - return ("xxx", 200) - - -class TestHandleExceptions: - """Tests handle_exceptions""" - - def test_401(self) -> None: - "Tests for 401 status" - res, status = func(SynapseNoCredentialsError) - assert status == 401 - assert isinstance(res, BasicError) - - res, status = func(SynapseAuthenticationError) - assert status == 401 - assert isinstance(res, BasicError) - - def test_403(self) -> None: - "Tests for 403 status" - res, status = func(AccessCredentialsError("project")) - assert status == 403 - assert isinstance(res, BasicError) - - def test_404(self) -> None: - "Tests for 404 status" - res, status = func(InvalidSchemaURL("message", "url")) - assert status == 404 - assert isinstance(res, BasicError) - - def test_500(self) -> None: - "Tests for 500 status" - res, status = func(TypeError) - assert status == 500 - assert isinstance(res, BasicError) - - -class TestDownloadSchemaFileAsJsonLD: - "tests download_schema_file_as_jsonld" - - def test_success(self, test_schema_url: str) -> None: - "tests for successful download" - file_path = download_schema_file_as_jsonld(test_schema_url) - assert file_path - - def test_failure(self) -> None: - "tests for exception" - with pytest.raises( - InvalidSchemaURL, match="The provided URL is incorrectly formatted: xxx" - ): - download_schema_file_as_jsonld("xxx") - - with pytest.raises( - InvalidSchemaURL, - match="The provided URL could not be found: https://raw.github.com/model.jsonld", - ): - download_schema_file_as_jsonld("https://raw.github.com/model.jsonld") - - -class TestCalculateByteSize: - """Tests calculate_byte_size""" - - def test_success(self) -> None: - """Tests sucessful results""" - assert calculate_byte_size("1B") == 1 - assert calculate_byte_size("2B") == 2 - assert calculate_byte_size("1K") == 1024 - assert calculate_byte_size("1.0K") == 1024 - assert calculate_byte_size("1.1K") == 1127 - assert calculate_byte_size("0") == 0 - - def test_errors(self) -> None: - """Tests for raised exceptions""" - - with pytest.raises(ValueError): - calculate_byte_size("1") - - with pytest.raises(ValueError): - calculate_byte_size("1X") - - -class TestCalculateDatetime: - """Tests calculate_datetime""" - - def test_with_defaults(self) -> None: - """Tests sucessful results""" - assert isinstance(calculate_datetime(0), datetime) - assert isinstance(calculate_datetime(1), datetime) - assert isinstance(calculate_datetime(-1), 
datetime) - - def test_with_input_datetime(self) -> None: - """Tests sucessful results""" - datetime1 = datetime(2024, 1, 15, 10, 0) - datetime2 = datetime(2024, 1, 15, 9, 50) - assert calculate_datetime(0, datetime1) == datetime1 - assert calculate_datetime(10, datetime1) == datetime2 - assert calculate_datetime(-10, datetime2) == datetime1 diff --git a/apps/schematic/api/schematic_api/test/test_version_endpoints.py b/apps/schematic/api/schematic_api/test/test_version_endpoints.py deleted file mode 100644 index 2439f5cdc..000000000 --- a/apps/schematic/api/schematic_api/test/test_version_endpoints.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Tests for version endpoints""" - -# pylint: disable=duplicate-code - -import importlib.metadata -from unittest.mock import patch - -from schematic_api.test import BaseTestCase - - -HEADERS = { - "Accept": "application/json", - "Authorization": "Bearer xxx", -} -SCHEMATIC_VERSION_URL = "/api/v1/schematicVersion" - - -class TestGetSchematicVersion(BaseTestCase): - """Test case for schematic version endpoint""" - - def test_success(self) -> None: - """Test for successful result""" - - response = self.client.open( - SCHEMATIC_VERSION_URL, method="GET", headers=HEADERS - ) - self.assert200(response, f"Response body is : {response.data.decode('utf-8')}") - - def test_500(self) -> None: - """Test for 500 result""" - with patch.object( - importlib.metadata, - "version", - side_effect=TypeError, - ): - response = self.client.open( - SCHEMATIC_VERSION_URL, method="GET", headers=HEADERS - ) - self.assert500( - response, f"Response body is : {response.data.decode('utf-8')}" - ) diff --git a/apps/schematic/api/schematic_api/typing_utils.py b/apps/schematic/api/schematic_api/typing_utils.py deleted file mode 100644 index f2f1bd3e6..000000000 --- a/apps/schematic/api/schematic_api/typing_utils.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding: utf-8 - -import sys - -if sys.version_info < (3, 7): - import typing - - def is_generic(klass): - """Determine whether klass is a generic class""" - return type(klass) == typing.GenericMeta - - def is_dict(klass): - """Determine whether klass is a Dict""" - return klass.__extra__ == dict - - def is_list(klass): - """Determine whether klass is a List""" - return klass.__extra__ == list - -else: - - def is_generic(klass): - """Determine whether klass is a generic class""" - return hasattr(klass, "__origin__") - - def is_dict(klass): - """Determine whether klass is a Dict""" - return klass.__origin__ == dict - - def is_list(klass): - """Determine whether klass is a List""" - return klass.__origin__ == list diff --git a/apps/schematic/api/schematic_api/util.py b/apps/schematic/api/schematic_api/util.py deleted file mode 100644 index aa9256985..000000000 --- a/apps/schematic/api/schematic_api/util.py +++ /dev/null @@ -1,150 +0,0 @@ -import datetime - -import six -import typing -from schematic_api import typing_utils - - -def _deserialize(data, klass): - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. 
- """ - if data is None: - return None - - if klass in six.integer_types or klass in (float, str, bool, bytearray): - return _deserialize_primitive(data, klass) - elif klass == object: - return _deserialize_object(data) - elif klass == datetime.date: - return deserialize_date(data) - elif klass == datetime.datetime: - return deserialize_datetime(data) - elif typing_utils.is_generic(klass): - if typing_utils.is_list(klass): - return _deserialize_list(data, klass.__args__[0]) - if typing_utils.is_dict(klass): - return _deserialize_dict(data, klass.__args__[1]) - else: - return deserialize_model(data, klass) - - -def _deserialize_primitive(data, klass): - """Deserializes to primitive type. - - :param data: data to deserialize. - :param klass: class literal. - - :return: int, long, float, str, bool. - :rtype: int | long | float | str | bool - """ - try: - value = klass(data) - except UnicodeEncodeError: - value = six.u(data) - except TypeError: - value = data - return value - - -def _deserialize_object(value): - """Return an original value. - - :return: object. - """ - return value - - -def deserialize_date(string): - """Deserializes string to date. - - :param string: str. - :type string: str - :return: date. - :rtype: date - """ - if string is None: - return None - - try: - from dateutil.parser import parse - - return parse(string).date() - except ImportError: - return string - - -def deserialize_datetime(string): - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :type string: str - :return: datetime. - :rtype: datetime - """ - if string is None: - return None - - try: - from dateutil.parser import parse - - return parse(string) - except ImportError: - return string - - -def deserialize_model(data, klass): - """Deserializes list or dict to model. - - :param data: dict, list. - :type data: dict | list - :param klass: class literal. - :return: model object. - """ - instance = klass() - - if not instance.openapi_types: - return data - - for attr, attr_type in six.iteritems(instance.openapi_types): - if ( - data is not None - and instance.attribute_map[attr] in data - and isinstance(data, (list, dict)) - ): - value = data[instance.attribute_map[attr]] - setattr(instance, attr, _deserialize(value, attr_type)) - - return instance - - -def _deserialize_list(data, boxed_type): - """Deserializes a list and its elements. - - :param data: list to deserialize. - :type data: list - :param boxed_type: class literal. - - :return: deserialized list. - :rtype: list - """ - return [_deserialize(sub_data, boxed_type) for sub_data in data] - - -def _deserialize_dict(data, boxed_type): - """Deserializes a dict and its elements. - - :param data: dict to deserialize. - :type data: dict - :param boxed_type: class literal. - - :return: deserialized dict. 
-    :rtype: dict
-    """
-    return {k: _deserialize(v, boxed_type) for k, v in six.iteritems(data)}
diff --git a/apps/schematic/api/self-signed.conf b/apps/schematic/api/self-signed.conf
deleted file mode 100644
index 0ea774915..000000000
--- a/apps/schematic/api/self-signed.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-# The ultimate goal is to set up a self-signed SSL certificate for Nginx to use
-# A self-signed certificate is required to encrypt the communication between flask API server and any clients
-# This configuration file is needed to configure nginx to use SSL certificates
-
-ssl_certificate /etc/ssl/certs/localhost.crt;
-ssl_certificate_key /etc/ssl/private/localhost.key;
\ No newline at end of file
diff --git a/apps/schematic/api/ssl-params.conf b/apps/schematic/api/ssl-params.conf
deleted file mode 100644
index cec80f7ed..000000000
--- a/apps/schematic/api/ssl-params.conf
+++ /dev/null
@@ -1,21 +0,0 @@
-# The configuration file was borrowed from: https://www.digitalocean.com/community/tutorials/how-to-create-a-self-signed-ssl-certificate-for-nginx-in-ubuntu-22-04
-# This configuration file is needed for setting up nginx securely.
-
-ssl_protocols TLSv1.2;
-ssl_prefer_server_ciphers on;
-ssl_dhparam /etc/ssl/dhparam.pem;
-ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384;
-ssl_ecdh_curve secp384r1; # Requires nginx >= 1.1.0
-ssl_session_timeout 10m;
-ssl_session_cache shared:SSL:10m;
-ssl_session_tickets off; # Requires nginx >= 1.5.9
-ssl_stapling on; # Requires nginx >= 1.3.7
-ssl_stapling_verify on; # Requires nginx => 1.3.7
-resolver 8.8.8.8 8.8.4.4 valid=300s;
-resolver_timeout 5s;
-# Disable strict transport security for now. You can uncomment the following
-# line if you understand the implications.
-# add_header Strict-Transport-Security "max-age=63072000; includeSubDomains; preload";
-add_header X-Frame-Options DENY;
-add_header X-Content-Type-Options nosniff;
-add_header X-XSS-Protection "1; mode=block";
\ No newline at end of file
diff --git a/apps/schematic/api/templates/AUTHORS.md b/apps/schematic/api/templates/AUTHORS.md
deleted file mode 100644
index 01c10d9f0..000000000
--- a/apps/schematic/api/templates/AUTHORS.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Authors
-
-Ordered by first contribution.
- -- [Thomas Schaffter](https://github.com/tschaffter) - - - - - diff --git a/apps/schematic/api/templates/config.yaml b/apps/schematic/api/templates/config.yaml deleted file mode 100644 index 8ecff1150..000000000 --- a/apps/schematic/api/templates/config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -files: - AUTHORS.md: {} diff --git a/apps/schematic/api/templates/controller.mustache b/apps/schematic/api/templates/controller.mustache deleted file mode 100644 index ece7cb416..000000000 --- a/apps/schematic/api/templates/controller.mustache +++ /dev/null @@ -1,116 +0,0 @@ -import connexion -import six -from typing import Dict -from typing import Tuple -from typing import Union - -{{#imports}}{{import}} # noqa: E501 -{{/imports}} -from {{packageName}} import util -from {{packageName}}.controllers import {{#lambda.snakecase}}{{baseName}}{{/lambda.snakecase}}_controller_impl -{{#operations}} -{{#operation}} - - -def {{operationId}}({{#allParams}}{{paramName}}{{^required}}=None{{/required}}{{^-last}}, {{/-last}}{{/allParams}}): # noqa: E501 - """{{summary}}{{^summary}}{{operationId}}{{/summary}} - - {{notes}} # noqa: E501 - - {{#allParams}} - :param {{paramName}}: {{description}} - {{^isContainer}} - {{#isPrimitiveType}} - :type {{paramName}}: {{>param_type}} - {{/isPrimitiveType}} - {{#isUuid}} - :type {{paramName}}: {{>param_type}} - {{/isUuid}} - {{^isPrimitiveType}} - {{#isFile}} - :type {{paramName}}: werkzeug.datastructures.FileStorage - {{/isFile}} - {{^isFile}} - {{^isUuid}} - :type {{paramName}}: dict | bytes - {{/isUuid}} - {{/isFile}} - {{/isPrimitiveType}} - {{/isContainer}} - {{#isArray}} - {{#items}} - {{#isPrimitiveType}} - :type {{paramName}}: List[{{>param_type}}] - {{/isPrimitiveType}} - {{^isPrimitiveType}} - :type {{paramName}}: list | bytes - {{/isPrimitiveType}} - {{/items}} - {{/isArray}} - {{#isMap}} - {{#items}} - {{#isPrimitiveType}} - :type {{paramName}}: Dict[str, {{>param_type}}] - {{/isPrimitiveType}} - {{^isPrimitiveType}} - :type {{paramName}}: dict | bytes - {{/isPrimitiveType}} - {{/items}} - {{/isMap}} - {{/allParams}} - - :rtype: Union[{{returnType}}{{^returnType}}None{{/returnType}}, Tuple[{{returnType}}{{^returnType}}None{{/returnType}}, int], Tuple[{{returnType}}{{^returnType}}None{{/returnType}}, int, Dict[str, str]] - """ - {{#allParams}} - {{^isContainer}} - {{#isDate}} - {{paramName}} = util.deserialize_date({{paramName}}) - {{/isDate}} - {{#isDateTime}} - {{paramName}} = util.deserialize_datetime({{paramName}}) - {{/isDateTime}} - {{^isPrimitiveType}} - {{^isFile}} - {{^isUuid}} - if connexion.request.is_json: - {{paramName}} = {{baseType}}{{^baseType}}{{#dataType}} {{.}}{{/dataType}}{{/baseType}}.from_dict(connexion.request.get_json()) # noqa: E501 - {{/isUuid}} - {{/isFile}} - {{/isPrimitiveType}} - {{/isContainer}} - {{#isArray}} - {{#items}} - {{#isDate}} - if connexion.request.is_json: - {{paramName}} = [util.deserialize_date(s) for s in connexion.request.get_json()] # noqa: E501 - {{/isDate}} - {{#isDateTime}} - if connexion.request.is_json: - {{paramName}} = [util.deserialize_datetime(s) for s in connexion.request.get_json()] # noqa: E501 - {{/isDateTime}} - {{#complexType}} - if connexion.request.is_json: - {{paramName}} = [{{complexType}}.from_dict(d) for d in connexion.request.get_json()] # noqa: E501 - {{/complexType}} - {{/items}} - {{/isArray}} - {{#isMap}} - {{#items}} - {{#isDate}} - if connexion.request.is_json: - {{paramName}} = {k: util.deserialize_date(v) for k, v in six.iteritems(connexion.request.get_json())} # noqa: E501 - {{/isDate}} - 
{{#isDateTime}} - if connexion.request.is_json: - {{paramName}} = {k: util.deserialize_datetime(v) for k, v in six.iteritems(connexion.request.get_json())} # noqa: E501 - {{/isDateTime}} - {{#complexType}} - if connexion.request.is_json: - {{paramName}} = {k: {{baseType}}.from_dict(v) for k, v in six.iteritems(connexion.request.get_json())} # noqa: E501 - {{/complexType}} - {{/items}} - {{/isMap}} - {{/allParams}} - return {{#lambda.snakecase}}{{baseName}}{{/lambda.snakecase}}_controller_impl.{{operationId}}({{#allParams}}{{paramName}}{{^-last}}, {{/-last}}{{/allParams}}) -{{/operation}} -{{/operations}} diff --git a/apps/schematic/api/tox.ini b/apps/schematic/api/tox.ini deleted file mode 100644 index df5b9bab7..000000000 --- a/apps/schematic/api/tox.ini +++ /dev/null @@ -1,9 +0,0 @@ -[tox] -envlist = py310 -skipsdist = True - -[testenv:py310] -allowlist_externals=poetry -commands = - poetry install - pytest --cov-report xml --cov-report html --cov=schematic_api diff --git a/apps/schematic/api/uwsgi-nginx-entrypoint.sh b/apps/schematic/api/uwsgi-nginx-entrypoint.sh deleted file mode 100644 index 8b42bf101..000000000 --- a/apps/schematic/api/uwsgi-nginx-entrypoint.sh +++ /dev/null @@ -1,53 +0,0 @@ -#! /usr/bin/env sh -set -e -/uwsgi-nginx-entrypoint.sh - -# save private key and certificate from environment variable -if [ -n "$SECRETS_MANAGER_SECRETS" ]; then - python3 /save_key_certificate.py - - # Ensure the private key file is securely accessible - chmod 600 /etc/ssl/private/localhost.key -fi - -# Get the URL for static files from the environment variable -USE_STATIC_URL=${STATIC_URL:-'/static'} -# Get the absolute path of the static files from the environment variable -USE_STATIC_PATH=${STATIC_PATH:-'/app/static'} -# Get the listen port for Nginx, default to 80 -USE_LISTEN_PORT=${LISTEN_PORT:-80} - -if [ -f /app/nginx.conf ]; then - cp /app/nginx.conf /etc/nginx/nginx.conf -else - content_server='server {\n' - content_server=$content_server" listen ${USE_LISTEN_PORT};\n" - content_server=$content_server' location / {\n' - content_server=$content_server' try_files $uri @app;\n' - content_server=$content_server' }\n' - content_server=$content_server' location @app {\n' - content_server=$content_server' include uwsgi_params;\n' - content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' - content_server=$content_server' }\n' - content_server=$content_server" location $USE_STATIC_URL {\n" - content_server=$content_server" alias $USE_STATIC_PATH;\n" - content_server=$content_server' }\n' - # If STATIC_INDEX is 1, serve / with /static/index.html directly (or the static URL configured) - if [ "$STATIC_INDEX" = 1 ] ; then - content_server=$content_server' location = / {\n' - content_server=$content_server" index $USE_STATIC_URL/index.html;\n" - content_server=$content_server' }\n' - fi - content_server=$content_server'}\n' - # Save generated server /etc/nginx/conf.d/nginx.conf - printf "$content_server" > /etc/nginx/conf.d/nginx.conf -fi - -# For Alpine: -# Explicitly add installed Python packages and uWSGI Python packages to PYTHONPATH -# Otherwise uWSGI can't import Flask -if [ -n "$ALPINEPYTHON" ] ; then - export PYTHONPATH=$PYTHONPATH:/usr/local/lib/$ALPINEPYTHON/site-packages:/usr/lib/$ALPINEPYTHON/site-packages -fi - -exec "$@" \ No newline at end of file diff --git a/apps/schematic/api/uwsgi.ini b/apps/schematic/api/uwsgi.ini deleted file mode 100644 index cd6de421a..000000000 --- a/apps/schematic/api/uwsgi.ini +++ /dev/null @@ -1,24 +0,0 @@ -[uwsgi] -# The variables http and 
*-socket are passed as command line arguments and
-# must not be specified in this file.
-wsgi-file = schematic_api/__main__.py
-callable = app
-uid = www-data
-gid = www-data
-processes = 1 # Number of concurrent processes / workers
-threads = 1 # Number of threads per process
-master = true
-chmod-sock = 660
-vacuum = true
-die-on-term = true
-thunder-lock = true
-http-keepalive = true
-harakiri-verbose = true
-http-timeout = 300 # necessary for preventing time-out
-uwsgi_read_timeout = 300 # necessary for preventing time-out
-uwsgi_send_timeout = 300 # necessary for preventing time-out
-buffer-size = 32768 # for dealing with long token in DCA and DFA
-# for dealing with OSError: write error
-ignore-sigpipe=true
-ignore-write-errors=true
-disable-write-exception=true
\ No newline at end of file
diff --git a/apps/schematic/notebook/.env.example b/apps/schematic/notebook/.env.example
deleted file mode 100644
index a32f1080e..000000000
--- a/apps/schematic/notebook/.env.example
+++ /dev/null
@@ -1,2 +0,0 @@
-JUPYTER_ENABLE_LAB=yes
-JUPYTER_TOKEN=changeme
\ No newline at end of file
diff --git a/apps/schematic/notebook/.gitignore b/apps/schematic/notebook/.gitignore
deleted file mode 100644
index 58461f254..000000000
--- a/apps/schematic/notebook/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.ipynb_checkpoints
\ No newline at end of file
diff --git a/apps/schematic/notebook/.python-version b/apps/schematic/notebook/.python-version
deleted file mode 100644
index 2009c7dfa..000000000
--- a/apps/schematic/notebook/.python-version
+++ /dev/null
@@ -1 +0,0 @@
-3.9.2
diff --git a/apps/schematic/notebook/Dockerfile b/apps/schematic/notebook/Dockerfile
deleted file mode 100644
index 7af198951..000000000
--- a/apps/schematic/notebook/Dockerfile
+++ /dev/null
@@ -1 +0,0 @@
-FROM jupyter/datascience-notebook:2022-12-26
\ No newline at end of file
diff --git a/apps/schematic/notebook/docker-compose.yml b/apps/schematic/notebook/docker-compose.yml
deleted file mode 100644
index 25d09c57f..000000000
--- a/apps/schematic/notebook/docker-compose.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-version: '3.8'
-
-services:
-  schematic-notebook:
-    image: ghcr.io/sage-bionetworks/schematic-notebook:local
-    container_name: schematic-notebook
-    restart: always
-    env_file:
-      - .env
-    volumes:
-      - ./notebooks:/home/jovyan/notebooks
-    networks:
-      - schematic
-    ports:
-      - '7888:8888'
-
-networks:
-  schematic:
-    name: schematic
diff --git a/apps/schematic/notebook/jupyter_lab_config.py b/apps/schematic/notebook/jupyter_lab_config.py
deleted file mode 100644
index eaf42ac15..000000000
--- a/apps/schematic/notebook/jupyter_lab_config.py
+++ /dev/null
@@ -1,1084 +0,0 @@
-# Configuration file for lab.
-
-c = get_config() # noqa
-
-# ------------------------------------------------------------------------------
-# Application(SingletonConfigurable) configuration
-# ------------------------------------------------------------------------------
-## This is an application.
-
-## The date format used by logging formatters for %(asctime)s
-# Default: '%Y-%m-%d %H:%M:%S'
-# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
-
-## The Logging format template
-# Default: '[%(name)s]%(highlevel)s %(message)s'
-# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
-
-## Set the log level by value or name.
-# Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL']
-# Default: 30
-# c.Application.log_level = 30
-
-## Configure additional log handlers.
-# -# The default stderr logs handler is configured by the log_level, log_datefmt -# and log_format settings. -# -# This configuration can be used to configure additional handlers (e.g. to -# output the log to a file) or for finer control over the default handlers. -# -# If provided this should be a logging configuration dictionary, for more -# information see: -# https://docs.python.org/3/library/logging.config.html#logging-config- -# dictschema -# -# This dictionary is merged with the base logging configuration which defines -# the following: -# -# * A logging formatter intended for interactive use called -# ``console``. -# * A logging handler that writes to stderr called -# ``console`` which uses the formatter ``console``. -# * A logger with the name of this application set to ``DEBUG`` -# level. -# -# This example adds a new handler that writes to a file: -# -# .. code-block:: python -# -# c.Application.logging_config = { -# 'handlers': { -# 'file': { -# 'class': 'logging.FileHandler', -# 'level': 'DEBUG', -# 'filename': '', -# } -# }, -# 'loggers': { -# '': { -# 'level': 'DEBUG', -# # NOTE: if you don't list the default "console" -# # handler here then it will be disabled -# 'handlers': ['console', 'file'], -# }, -# } -# } -# Default: {} -# c.Application.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# Default: False -# c.Application.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# Default: False -# c.Application.show_config_json = False - -# ------------------------------------------------------------------------------ -# JupyterApp(Application) configuration -# ------------------------------------------------------------------------------ -## Base class for Jupyter applications - -## Answer yes to any prompts. -# Default: False -# c.JupyterApp.answer_yes = False - -## Full path of a config file. -# Default: '' -# c.JupyterApp.config_file = '' - -## Specify a config file to load. -# Default: '' -# c.JupyterApp.config_file_name = '' - -## Generate default config file. -# Default: False -# c.JupyterApp.generate_config = False - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.JupyterApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.JupyterApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.JupyterApp.log_level = 30 - -## -# See also: Application.logging_config -# c.JupyterApp.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.JupyterApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.JupyterApp.show_config_json = False - -# ------------------------------------------------------------------------------ -# ExtensionApp(JupyterApp) configuration -# ------------------------------------------------------------------------------ -## Base class for configurable Jupyter Server Extension Applications. -# -# ExtensionApp subclasses can be initialized two ways: -# 1. Extension is listed as a jpserver_extension, and ServerApp calls -# its load_jupyter_server_extension classmethod. This is the -# classic way of loading a server extension. -# 2. 
Extension is launched directly by calling its `launch_instance` -# class method. This method can be set as a entry_point in -# the extensions setup.py - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.ExtensionApp.answer_yes = False - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.ExtensionApp.config_file = '' - -## Specify a config file to load. -# See also: JupyterApp.config_file_name -# c.ExtensionApp.config_file_name = '' - -# Default: '' -# c.ExtensionApp.default_url = '' - -## Generate default config file. -# See also: JupyterApp.generate_config -# c.ExtensionApp.generate_config = False - -## Handlers appended to the server. -# Default: [] -# c.ExtensionApp.handlers = [] - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.ExtensionApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.ExtensionApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.ExtensionApp.log_level = 30 - -## -# See also: Application.logging_config -# c.ExtensionApp.logging_config = {} - -## Whether to open in a browser after starting. -# The specific browser used is platform dependent and -# determined by the python standard library `webbrowser` -# module, unless it is overridden using the --browser -# (ServerApp.browser) configuration option. -# Default: False -# c.ExtensionApp.open_browser = False - -## Settings that will passed to the server. -# Default: {} -# c.ExtensionApp.settings = {} - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.ExtensionApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.ExtensionApp.show_config_json = False - -## paths to search for serving static files. -# -# This allows adding javascript/css to be available from the notebook server machine, -# or overriding individual files in the IPython -# Default: [] -# c.ExtensionApp.static_paths = [] - -## Url where the static assets for the extension are served. -# Default: '' -# c.ExtensionApp.static_url_prefix = '' - -## Paths to search for serving jinja templates. -# -# Can be used to override templates from notebook.templates. -# Default: [] -# c.ExtensionApp.template_paths = [] - -# ------------------------------------------------------------------------------ -# LabServerApp(ExtensionApp) configuration -# ------------------------------------------------------------------------------ -## A Lab Server Application that runs out-of-the-box - -## "A list of comma-separated URIs to get the allowed extensions list -# -# .. versionchanged:: 2.0.0 -# `LabServerApp.whitetlist_uris` renamed to `allowed_extensions_uris` -# Default: '' -# c.LabServerApp.allowed_extensions_uris = '' - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.LabServerApp.answer_yes = False - -## The application settings directory. -# Default: '' -# c.LabServerApp.app_settings_dir = '' - -## The url path for the application. -# Default: '/lab' -# c.LabServerApp.app_url = '/lab' - -## Deprecated, use `LabServerApp.blocked_extensions_uris` -# Default: '' -# c.LabServerApp.blacklist_uris = '' - -## A list of comma-separated URIs to get the blocked extensions list -# -# .. 
versionchanged:: 2.0.0 -# `LabServerApp.blacklist_uris` renamed to `blocked_extensions_uris` -# Default: '' -# c.LabServerApp.blocked_extensions_uris = '' - -## Whether to cache files on the server. This should be `True` except in dev -# mode. -# Default: True -# c.LabServerApp.cache_files = True - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.LabServerApp.config_file = '' - -## Specify a config file to load. -# See also: JupyterApp.config_file_name -# c.LabServerApp.config_file_name = '' - -## Extra paths to look for federated JupyterLab extensions -# Default: [] -# c.LabServerApp.extra_labextensions_path = [] - -## Generate default config file. -# See also: JupyterApp.generate_config -# c.LabServerApp.generate_config = False - -## Handlers appended to the server. -# See also: ExtensionApp.handlers -# c.LabServerApp.handlers = [] - -## Options to pass to the jinja2 environment for this -# Default: {} -# c.LabServerApp.jinja2_options = {} - -## The standard paths to look in for federated JupyterLab extensions -# Default: [] -# c.LabServerApp.labextensions_path = [] - -## The url for federated JupyterLab extensions -# Default: '' -# c.LabServerApp.labextensions_url = '' - -## The interval delay in seconds to refresh the lists -# Default: 3600 -# c.LabServerApp.listings_refresh_seconds = 3600 - -## The optional kwargs to use for the listings HTTP requests as -# described on https://2.python-requests.org/en/v2.7.0/api/#requests.request -# Default: {} -# c.LabServerApp.listings_request_options = {} - -## The listings url. -# Default: '' -# c.LabServerApp.listings_url = '' - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.LabServerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.LabServerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.LabServerApp.log_level = 30 - -## -# See also: Application.logging_config -# c.LabServerApp.logging_config = {} - -## Whether a notebook should start a kernel automatically. -# Default: True -# c.LabServerApp.notebook_starts_kernel = True - -## Whether to open in a browser after starting. -# See also: ExtensionApp.open_browser -# c.LabServerApp.open_browser = False - -## The optional location of the settings schemas directory. If given, a handler -# will be added for settings. -# Default: '' -# c.LabServerApp.schemas_dir = '' - -## Settings that will passed to the server. -# See also: ExtensionApp.settings -# c.LabServerApp.settings = {} - -## The url path of the settings handler. -# Default: '' -# c.LabServerApp.settings_url = '' - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.LabServerApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.LabServerApp.show_config_json = False - -## The optional location of local static files. If given, a static file handler -# will be added. -# Default: '' -# c.LabServerApp.static_dir = '' - -## paths to search for serving static files. -# See also: ExtensionApp.static_paths -# c.LabServerApp.static_paths = [] - -## Url where the static assets for the extension are served. -# See also: ExtensionApp.static_url_prefix -# c.LabServerApp.static_url_prefix = '' - -## Paths to search for serving jinja templates. 
-# See also: ExtensionApp.template_paths -# c.LabServerApp.template_paths = [] - -## The application templates directory. -# Default: '' -# c.LabServerApp.templates_dir = '' - -## The optional location of the themes directory. If given, a handler will be -# added for themes. -# Default: '' -# c.LabServerApp.themes_dir = '' - -## The theme url. -# Default: '' -# c.LabServerApp.themes_url = '' - -## The url path of the translations handler. -# Default: '' -# c.LabServerApp.translations_api_url = '' - -## The url path of the tree handler. -# Default: '' -# c.LabServerApp.tree_url = '' - -## The optional location of the user settings directory. -# Default: '' -# c.LabServerApp.user_settings_dir = '' - -## Deprecated, use `LabServerApp.allowed_extensions_uris` -# Default: '' -# c.LabServerApp.whitelist_uris = '' - -## The url path of the workspaces API. -# Default: '' -# c.LabServerApp.workspaces_api_url = '' - -## The optional location of the saved workspaces directory. If given, a handler -# will be added for workspaces. -# Default: '' -# c.LabServerApp.workspaces_dir = '' - -# ------------------------------------------------------------------------------ -# LabApp(LabServerApp) configuration -# ------------------------------------------------------------------------------ -## -# See also: LabServerApp.allowed_extensions_uris -# c.LabApp.allowed_extensions_uris = '' - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.LabApp.answer_yes = False - -## The app directory to launch JupyterLab from. -# Default: None -# c.LabApp.app_dir = None - -## The application settings directory. -# Default: '' -# c.LabApp.app_settings_dir = '' - -## The url path for the application. -# Default: '/lab' -# c.LabApp.app_url = '/lab' - -## Deprecated, use `LabServerApp.blocked_extensions_uris` -# See also: LabServerApp.blacklist_uris -# c.LabApp.blacklist_uris = '' - -## -# See also: LabServerApp.blocked_extensions_uris -# c.LabApp.blocked_extensions_uris = '' - -## Whether to cache files on the server. This should be `True` except in dev -# mode. -# Default: True -# c.LabApp.cache_files = True - -## Whether to enable collaborative mode (experimental). -# Default: False -# c.LabApp.collaborative = False - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.LabApp.config_file = '' - -## Specify a config file to load. -# See also: JupyterApp.config_file_name -# c.LabApp.config_file_name = '' - -## Whether to start the app in core mode. In this mode, JupyterLab -# will run using the JavaScript assets that are within the installed -# JupyterLab Python package. In core mode, third party extensions are disabled. -# The `--dev-mode` flag is an alias to this to be used when the Python package -# itself is installed in development mode (`pip install -e .`). -# Default: False -# c.LabApp.core_mode = False - -## The default URL to redirect to from `/` -# Default: '/lab' -# c.LabApp.default_url = '/lab' - -## Whether to start the app in dev mode. Uses the unpublished local -# JavaScript packages in the `dev_mode` folder. In this case JupyterLab will -# show a red stripe at the top of the page. It can only be used if JupyterLab -# is installed as `pip install -e .`. -# Default: False -# c.LabApp.dev_mode = False - -## Whether to expose the global app instance to browser via window.jupyterlab -# Default: False -# c.LabApp.expose_app_in_browser = False - -## Whether to load prebuilt extensions in dev mode. 
This may be -# useful to run and test prebuilt extensions in development installs of -# JupyterLab. APIs in a JupyterLab development install may be -# incompatible with published packages, so prebuilt extensions compiled -# against published packages may not work correctly. -# Default: False -# c.LabApp.extensions_in_dev_mode = False - -## Extra paths to look for federated JupyterLab extensions -# Default: [] -# c.LabApp.extra_labextensions_path = [] - -## Generate default config file. -# See also: JupyterApp.generate_config -# c.LabApp.generate_config = False - -## Handlers appended to the server. -# See also: ExtensionApp.handlers -# c.LabApp.handlers = [] - -## Options to pass to the jinja2 environment for this -# Default: {} -# c.LabApp.jinja2_options = {} - -## The standard paths to look in for federated JupyterLab extensions -# Default: [] -# c.LabApp.labextensions_path = [] - -## The url for federated JupyterLab extensions -# Default: '' -# c.LabApp.labextensions_url = '' - -## The interval delay in seconds to refresh the lists -# See also: LabServerApp.listings_refresh_seconds -# c.LabApp.listings_refresh_seconds = 3600 - -## The optional kwargs to use for the listings HTTP requests as -# described on https://2.python-requests.org/en/v2.7.0/api/#requests.request -# See also: LabServerApp.listings_request_options -# c.LabApp.listings_request_options = {} - -## The listings url. -# Default: '' -# c.LabApp.listings_url = '' - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.LabApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.LabApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.LabApp.log_level = 30 - -## -# See also: Application.logging_config -# c.LabApp.logging_config = {} - -## Whether a notebook should start a kernel automatically. -# Default: True -# c.LabApp.notebook_starts_kernel = True - -## Whether to open in a browser after starting. -# See also: ExtensionApp.open_browser -# c.LabApp.open_browser = False - -## The override url for static lab assets, typically a CDN. -# Default: '' -# c.LabApp.override_static_url = '' - -## The override url for static lab theme assets, typically a CDN. -# Default: '' -# c.LabApp.override_theme_url = '' - -## The optional location of the settings schemas directory. If given, a handler -# will be added for settings. -# Default: '' -# c.LabApp.schemas_dir = '' - -## Settings that will passed to the server. -# See also: ExtensionApp.settings -# c.LabApp.settings = {} - -## The url path of the settings handler. -# Default: '' -# c.LabApp.settings_url = '' - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.LabApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.LabApp.show_config_json = False - -## Splice source packages into app directory. -# Default: False -# c.LabApp.splice_source = False - -## The optional location of local static files. If given, a static file handler -# will be added. -# Default: '' -# c.LabApp.static_dir = '' - -## paths to search for serving static files. -# See also: ExtensionApp.static_paths -# c.LabApp.static_paths = [] - -## Url where the static assets for the extension are served. 
-# See also: ExtensionApp.static_url_prefix -# c.LabApp.static_url_prefix = '' - -## Paths to search for serving jinja templates. -# See also: ExtensionApp.template_paths -# c.LabApp.template_paths = [] - -## The application templates directory. -# Default: '' -# c.LabApp.templates_dir = '' - -## The optional location of the themes directory. If given, a handler will be -# added for themes. -# Default: '' -# c.LabApp.themes_dir = '' - -## The theme url. -# Default: '' -# c.LabApp.themes_url = '' - -## The url path of the translations handler. -# Default: '' -# c.LabApp.translations_api_url = '' - -## The url path of the tree handler. -# Default: '' -# c.LabApp.tree_url = '' - -## The directory for user settings. -# Default: '/home/vscode/.jupyter/lab/user-settings' -# c.LabApp.user_settings_dir = '/home/vscode/.jupyter/lab/user-settings' - -## Whether to serve the app in watch mode -# Default: False -# c.LabApp.watch = False - -## Deprecated, use `LabServerApp.allowed_extensions_uris` -# See also: LabServerApp.whitelist_uris -# c.LabApp.whitelist_uris = '' - -## The url path of the workspaces API. -# Default: '' -# c.LabApp.workspaces_api_url = '' - -## The directory for workspaces -# Default: '/home/vscode/.jupyter/lab/workspaces' -# c.LabApp.workspaces_dir = '/home/vscode/.jupyter/lab/workspaces' - -# ------------------------------------------------------------------------------ -# ServerApp(JupyterApp) configuration -# ------------------------------------------------------------------------------ -## Set the Access-Control-Allow-Credentials: true header -# Default: False -# c.ServerApp.allow_credentials = False - -## Set the Access-Control-Allow-Origin header -# -# Use '*' to allow any origin to access your server. -# -# Takes precedence over allow_origin_pat. -# Default: '' -# c.ServerApp.allow_origin = '' - -## Use a regular expression for the Access-Control-Allow-Origin header -# -# Requests from an origin matching the expression will get replies with: -# -# Access-Control-Allow-Origin: origin -# -# where `origin` is the origin of the request. -# -# Ignored if allow_origin is set. -# Default: '' -# c.ServerApp.allow_origin_pat = '' - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change -# Default: True -# c.ServerApp.allow_password_change = True - -## Allow requests where the Host header doesn't point to a local server -# -# By default, requests get a 403 forbidden response if the 'Host' header -# shows that the browser thinks it's on a non-local domain. -# Setting this option to True disables this check. -# -# This protects against 'DNS rebinding' attacks, where a remote web server -# serves you a page and then changes its DNS to send later requests to a -# local IP, bypassing same-origin checks. -# -# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, -# along with hostnames configured in local_hostnames. -# Default: False -# c.ServerApp.allow_remote_access = False - -## Whether to allow the user to run the server as root. -# Default: False -# c.ServerApp.allow_root = False - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.ServerApp.answer_yes = False - -## " -# Require authentication to access prometheus metrics. -# Default: True -# c.ServerApp.authenticate_prometheus = True - -## The authorizer class to use. 
-# Default: 'jupyter_server.auth.authorizer.AllowAllAuthorizer' -# c.ServerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' - -## Reload the webapp when changes are made to any Python src files. -# Default: False -# c.ServerApp.autoreload = False - -## The base URL for the Jupyter server. -# -# Leading and trailing slashes can be omitted, -# and will automatically be added. -# Default: '/' -# c.ServerApp.base_url = '/' - -## Specify what command to use to invoke a web -# browser when starting the server. If not specified, the -# default browser will be determined by the `webbrowser` -# standard library module, which allows setting of the -# BROWSER environment variable to override it. -# Default: '' -# c.ServerApp.browser = '' - -## The full path to an SSL/TLS certificate file. -# Default: '' -# c.ServerApp.certfile = '' - -## The full path to a certificate authority certificate for SSL/TLS client -# authentication. -# Default: '' -# c.ServerApp.client_ca = '' - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.ServerApp.config_file = '' - -## Specify a config file to load. -# See also: JupyterApp.config_file_name -# c.ServerApp.config_file_name = '' - -## The config manager class to use -# Default: 'jupyter_server.services.config.manager.ConfigManager' -# c.ServerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' - -## The content manager class to use. -# Default: 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' -# c.ServerApp.contents_manager_class = 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' - -## DEPRECATED. Use IdentityProvider.cookie_options -# Default: {} -# c.ServerApp.cookie_options = {} - -## The random bytes used to secure cookies. -# By default this is a new random number every time you start the server. -# Set it to a value in a config file to enable logins to persist across server sessions. -# -# Note: Cookie secrets should be kept private, do not share config files with -# cookie_secret stored in plaintext (you can read the value from a file). -# Default: b'' -# c.ServerApp.cookie_secret = b'' - -## The file where the cookie secret is stored. -# Default: '' -# c.ServerApp.cookie_secret_file = '' - -## Override URL shown to users. -# -# Replace actual URL, including protocol, address, port and base URL, -# with the given value when displaying URL to the users. Do not change -# the actual connection URL. If authentication token is enabled, the -# token is added to the custom URL automatically. -# -# This option is intended to be used when the URL to display to the user -# cannot be determined reliably by the Jupyter server (proxified -# or containerized setups for example). -# Default: '' -# c.ServerApp.custom_display_url = '' - -## The default URL to redirect to from `/` -# Default: '/' -# c.ServerApp.default_url = '/' - -## Disable cross-site-request-forgery protection -# -# Jupyter server includes protection from cross-site request forgeries, -# requiring API requests to either: -# -# - originate from pages served by this server (validated with XSRF cookie and token), or -# - authenticate with a token -# -# Some anonymous compute resources still desire the ability to run code, -# completely without authentication. -# These services can disable all authentication and security checks, -# with the full knowledge of what that implies. 
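To make it concrete how commented defaults like these are actually overridden, a hypothetical fragment of a jupyter_server_config.py for an instance running behind a reverse proxy might look like the sketch below. The trait names are the ones documented in this file, but the values are purely illustrative and are not settings used by this project:

    # Illustrative values only; none of these are part of the project's real config.
    c.ServerApp.allow_remote_access = True                       # allow non-local Host headers
    c.ServerApp.allow_origin = "https://notebooks.example.org"   # CORS origin allowed to call the server
    c.ServerApp.trust_xheaders = True                            # honour X-Forwarded-* headers from the proxy
    c.ServerApp.base_url = "/jupyter/"                           # serve under a URL prefix
    c.IdentityProvider.token = "use-a-long-random-token"         # require token authentication
    # XSRF protection stays enabled; disable it only with full knowledge of the implications.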
-# Default: False -# c.ServerApp.disable_check_xsrf = False - -## handlers that should be loaded at higher priority than the default services -# Default: [] -# c.ServerApp.extra_services = [] - -## Extra paths to search for serving static files. -# -# This allows adding javascript/css to be available from the Jupyter server machine, -# or overriding individual files in the IPython -# Default: [] -# c.ServerApp.extra_static_paths = [] - -## Extra paths to search for serving jinja templates. -# -# Can be used to override templates from jupyter_server.templates. -# Default: [] -# c.ServerApp.extra_template_paths = [] - -## Open the named file when the application is launched. -# Default: '' -# c.ServerApp.file_to_run = '' - -## The URL prefix where files are opened directly. -# Default: 'notebooks' -# c.ServerApp.file_url_prefix = 'notebooks' - -## Generate default config file. -# See also: JupyterApp.generate_config -# c.ServerApp.generate_config = False - -## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs -# Default: {} -# c.ServerApp.get_secure_cookie_kwargs = {} - -## The identity provider class to use. -# Default: 'jupyter_server.auth.identity.PasswordIdentityProvider' -# c.ServerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit -# Default: 0.0 -# c.ServerApp.iopub_data_rate_limit = 0.0 - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit -# Default: 0.0 -# c.ServerApp.iopub_msg_rate_limit = 0.0 - -## The IP address the Jupyter server will listen on. -# Default: 'localhost' -# c.ServerApp.ip = 'localhost' - -## Supply extra arguments that will be passed to Jinja environment. -# Default: {} -# c.ServerApp.jinja_environment_options = {} - -## Extra variables to supply to jinja templates when rendering. -# Default: {} -# c.ServerApp.jinja_template_vars = {} - -## Dict of Python modules to load as Jupyter server extensions.Entry values can -# be used to enable and disable the loading ofthe extensions. The extensions -# will be loaded in alphabetical order. -# Default: {} -# c.ServerApp.jpserver_extensions = {} - -## The kernel manager class to use. -# Default: 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' -# c.ServerApp.kernel_manager_class = 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' - -## The kernel spec manager class to use. Should be a subclass of -# `jupyter_client.kernelspec.KernelSpecManager`. -# -# The Api of KernelSpecManager is provisional and might change without warning -# between this version of Jupyter and the next stable one. -# Default: 'builtins.object' -# c.ServerApp.kernel_spec_manager_class = 'builtins.object' - -## The kernel websocket connection class to use. -# Default: 'jupyter_server.services.kernels.connection.channels.ZMQChannelsWebsocketConnection' -# c.ServerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.channels.ZMQChannelsWebsocketConnection' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol -# Default: '' -# c.ServerApp.kernel_ws_protocol = '' - -## The full path to a private key file for usage with SSL/TLS. -# Default: '' -# c.ServerApp.keyfile = '' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.limit_rate -# Default: False -# c.ServerApp.limit_rate = False - -## Hostnames to allow as local when allow_remote_access is False. 
-# -# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted -# as local as well. -# Default: ['localhost'] -# c.ServerApp.local_hostnames = ['localhost'] - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.ServerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.ServerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.ServerApp.log_level = 30 - -## -# See also: Application.logging_config -# c.ServerApp.logging_config = {} - -## The login handler class to use. -# Default: 'jupyter_server.auth.login.LegacyLoginHandler' -# c.ServerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' - -## The logout handler class to use. -# Default: 'jupyter_server.auth.logout.LogoutHandler' -# c.ServerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' - -## Sets the maximum allowed size of the client request body, specified in the -# Content-Length request header field. If the size in a request exceeds the -# configured value, a malformed HTTP message is returned to the client. -# -# Note: max_body_size is applied even in streaming mode. -# Default: 536870912 -# c.ServerApp.max_body_size = 536870912 - -## Gets or sets the maximum amount of memory, in bytes, that is allocated for use -# by the buffer manager. -# Default: 536870912 -# c.ServerApp.max_buffer_size = 536870912 - -## Gets or sets a lower bound on the open file handles process resource limit. -# This may need to be increased if you run into an OSError: [Errno 24] Too many -# open files. This is not applicable when running on Windows. -# Default: 0 -# c.ServerApp.min_open_files_limit = 0 - -## DEPRECATED, use root_dir. -# Default: '' -# c.ServerApp.notebook_dir = '' - -## Whether to open in a browser after starting. -# The specific browser used is platform dependent and -# determined by the python standard library `webbrowser` -# module, unless it is overridden using the --browser -# (ServerApp.browser) configuration option. -# Default: False -# c.ServerApp.open_browser = False - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password -# Default: '' -# c.ServerApp.password = '' - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required -# Default: False -# c.ServerApp.password_required = False - -## The port the server will listen on (env: JUPYTER_PORT). -# Default: 0 -c.ServerApp.port = 7888 - -## The number of additional ports to try if the specified port is not available -# (env: JUPYTER_PORT_RETRIES). -# Default: 50 -# c.ServerApp.port_retries = 50 - -## Preferred starting directory to use for notebooks and kernels. -# Default: '' -# c.ServerApp.preferred_dir = '' - -## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. -# Default: 'disabled' -# c.ServerApp.pylab = 'disabled' - -## If True, display controls to shut down the Jupyter server, such as menu items -# or buttons. -# Default: True -# c.ServerApp.quit_button = True - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window -# Default: 0.0 -# c.ServerApp.rate_limit_window = 0.0 - -## Reraise exceptions encountered loading server extensions? -# Default: False -# c.ServerApp.reraise_server_extension_failures = False - -## The directory to use for notebooks and kernels. -# Default: '' -# c.ServerApp.root_dir = '' - -## The session manager class to use. 
-# Default: 'builtins.object' -# c.ServerApp.session_manager_class = 'builtins.object' - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.ServerApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.ServerApp.show_config_json = False - -## Shut down the server after N seconds with no kernelsrunning and no activity. -# This can be used together with culling idle kernels -# (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when -# it's not in use. This is not precisely timed: it may shut down up to a minute -# later. 0 (the default) disables this automatic shutdown. -# Default: 0 -# c.ServerApp.shutdown_no_activity_timeout = 0 - -## The UNIX socket the Jupyter server will listen on. -# Default: '' -# c.ServerApp.sock = '' - -## The permissions mode for UNIX socket creation (default: 0600). -# Default: '0600' -# c.ServerApp.sock_mode = '0600' - -## Supply SSL options for the tornado HTTPServer. -# See the tornado docs for details. -# Default: {} -# c.ServerApp.ssl_options = {} - -## Supply overrides for terminado. Currently only supports "shell_command". -# Default: {} -# c.ServerApp.terminado_settings = {} - -## Set to False to disable terminals. -# -# This does *not* make the server more secure by itself. -# Anything the user can in a terminal, they can also do in a notebook. -# -# Terminals may also be automatically disabled if the terminado package -# is not available. -# Default: False -# c.ServerApp.terminals_enabled = False - -## DEPRECATED. Use IdentityProvider.token -# Default: '' -# c.ServerApp.token = '' -c.IdentityProvider.token = "changeme" - -## Supply overrides for the tornado.web.Application that the Jupyter server uses. -# Default: {} -# c.ServerApp.tornado_settings = {} - -## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- -# For headerssent by the upstream reverse proxy. Necessary if the proxy handles -# SSL -# Default: False -# c.ServerApp.trust_xheaders = False - -## Disable launching browser by redirect file -# For versions of notebook > 5.7.2, a security feature measure was added that -# prevented the authentication token used to launch the browser from being visible. -# This feature makes it difficult for other users on a multi-user system from -# running code in your Jupyter session as you. -# However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), -# launching a browser using a redirect file can lead the browser failing to load. -# This is because of the difference in file structures/paths between the runtime and -# the browser. -# -# Disabling this setting to False will disable this behavior, allowing the browser -# to launch by using a URL and visible token (as before). -# Default: True -# c.ServerApp.use_redirect_file = True - -## Specify where to open the server on startup. This is the -# `new` argument passed to the standard library method `webbrowser.open`. -# The behaviour is not guaranteed, but depends on browser support. Valid -# values are: -# -# - 2 opens a new tab, -# - 1 opens a new window, -# - 0 opens in an existing window. -# -# See the `webbrowser.open` documentation for details. -# Default: 2 -# c.ServerApp.webbrowser_open_new = 2 - -## Set the tornado compression options for websocket connections. -# -# This value will be returned from -# :meth:`WebSocketHandler.get_compression_options`. 
None (default) will disable -# compression. -# A dict (even an empty one) will enable compression. -# -# See the tornado docs for WebSocketHandler.get_compression_options for details. -# Default: None -# c.ServerApp.websocket_compression_options = None - -## The base URL for websockets, -# if it differs from the HTTP server (hint: it almost certainly doesn't). -# -# Should be in the form of an HTTP origin: ws[s]://hostname[:port] -# Default: '' -# c.ServerApp.websocket_url = '' diff --git a/apps/schematic/notebook/notebooks/schematic-api.ipynb b/apps/schematic/notebook/notebooks/schematic-api.ipynb deleted file mode 100644 index 9288981c0..000000000 --- a/apps/schematic/notebook/notebooks/schematic-api.ipynb +++ /dev/null @@ -1,146 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "34a39b07-385c-49b7-931f-3631a1aff129", - "metadata": { - "tags": [] - }, - "source": [ - "# Schematic REST API Example" - ] - }, - { - "cell_type": "markdown", - "id": "ed72ae29-13e0-4533-8e7e-f13c9b34da1a", - "metadata": {}, - "source": [ - "## Overview" - ] - }, - { - "cell_type": "markdown", - "id": "b876938b-ae3a-49ba-b416-b9fd59f88ae7", - "metadata": {}, - "source": [ - "This notebook shows how to use the Schematic API client for Python to get a list of datasets from the Schematic REST API." - ] - }, - { - "cell_type": "markdown", - "id": "7e95e24d-3f3a-47d0-bb52-35e718eb7ac6", - "metadata": {}, - "source": [ - "## Requirements" - ] - }, - { - "cell_type": "markdown", - "id": "cc586a81-e5b4-4021-9c6b-aa8a69ef221c", - "metadata": {}, - "source": [ - "The Schematic REST API must already be running. See the project `schematic-api` located in `apps/schematic/api` for more information." - ] - }, - { - "cell_type": "markdown", - "id": "c90710c2-f053-44ae-a3c2-610eecff9073", - "metadata": {}, - "source": [ - "## List storage datasets" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "a9f3da43-6f09-4774-9fe7-59b83dbb147e", - "metadata": {}, - "outputs": [], - "source": [ - "import schematic_client\n", - "from pprint import pprint\n", - "from schematic_client.api import storage_api" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "8fc3ac4c-2ceb-4bbc-bdb8-3bb8be08dfc6", - "metadata": {}, - "outputs": [], - "source": [ - "# Defining the host is optional and defaults to http://localhost/api/v1\n", - "# See configuration.py for a list of all supported configuration parameters.\n", - "configuration = schematic_client.Configuration(\n", - " host = \"http://schematic-api:7080/api/v1\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "2f855989-f0a9-4fa0-a8f8-63edba2fdb38", - "metadata": {}, - "source": [ - "> **Note** The hostname `schematic-api` is defined in `/etc/hosts`. The dev container provided with this project is responsible for defining the required hostnames. By default, the hostnames are mapped to `127.0.0.1`."
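Putting the notebook's cells together, the request it makes reduces to the short standalone script sketched below. This is a sketch under the assumption that the generated `schematic_client` package exposes `Configuration`, `ApiClient` and `ApiException` at the top level, as openapi-generator Python clients usually do; the notebook cell that follows catches `openapi_client.ApiException`, which is never imported here.

    import schematic_client
    from pprint import pprint
    from schematic_client.api import storage_api

    # Point the client at the locally running Schematic REST API (hostname and port as configured above).
    configuration = schematic_client.Configuration(host="http://schematic-api:7080/api/v1")

    with schematic_client.ApiClient(configuration) as api_client:
        api_instance = storage_api.StorageApi(api_client)
        project_id = "syn26251192"  # Synapse ID of a storage project

        try:
            # List the datasets under the storage project that the current user can access.
            api_response = api_instance.list_storage_project_datasets(project_id)
            pprint(api_response)
        except schematic_client.ApiException as e:  # assumed export; openapi_client is not imported in this notebook
            print("Exception when calling StorageApi->list_storage_project_datasets: %s" % e)

Run against the dev container, this prints the same paginated dataset listing shown in the output of the cell below.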
- ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "b2f952f5-9140-4702-8a96-3457ca4df841", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'datasets': [{'name': 'dataset-1'},\n", - " {'name': 'dataset-2'},\n", - " {'name': 'dataset-3'}],\n", - " 'has_next': False,\n", - " 'has_previous': False,\n", - " 'number': 0,\n", - " 'size': 100,\n", - " 'total_elements': 3,\n", - " 'total_pages': 1}\n" - ] - } - ], - "source": [ - "# Enter a context with an instance of the API client\n", - "with schematic_client.ApiClient(configuration) as api_client:\n", - " # Create an instance of the API class\n", - " api_instance = storage_api.StorageApi(api_client)\n", - " project_id = \"syn26251192\" # str | The Synapse ID of a storage project.\n", - "\n", - " try:\n", - " # Gets all datasets in folder under a given storage project that the current user has access to.\n", - " api_response = api_instance.list_storage_project_datasets(project_id)\n", - " pprint(api_response)\n", - " except openapi_client.ApiException as e:\n", - " print(\"Exception when calling StorageApi->list_storage_project_datasets: %s\\n\" % e)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/apps/schematic/notebook/poetry.lock b/apps/schematic/notebook/poetry.lock deleted file mode 100644 index 3af6ab3ac..000000000 --- a/apps/schematic/notebook/poetry.lock +++ /dev/null @@ -1,1914 +0,0 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. - -[[package]] -name = "anyio" -version = "3.6.2" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" -optional = false -python-versions = ">=3.6.2" -files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16,<0.22)"] - -[[package]] -name = "appnope" -version = "0.1.3" -description = "Disable App Nap on macOS >= 10.9" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] - -[[package]] -name = "argon2-cffi" -version = "21.3.0" -description = "The secure Argon2 password hashing algorithm." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, 
- {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.2.3" -description = "Better dates & times for Python" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" - -[[package]] -name = "asttokens" -version = "2.2.1" -description = "Annotate AST trees with source code positions" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] - -[package.dependencies] -six = "*" - -[package.extras] -test = ["astroid", "pytest"] - -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "babel" -version = "2.11.0" -description = "Internationalization utilities" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, - {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, -] - -[package.dependencies] -pytz = ">=2015.7" - -[[package]] -name = "backcall" 
-version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - -[[package]] -name = "beautifulsoup4" -version = "4.11.1" -description = "Screen-scraping library" -category = "dev" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "bleach" -version = "5.0.1" -description = "An easy safelist-based HTML-sanitizing tool." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, - {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] -dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] - -[[package]] -name = "certifi" -version = "2022.12.7" -description = "Python package for providing Mozilla's CA Bundle." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, -] - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." 
-category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.1.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, - {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, -] - -[package.dependencies] -traitlets = ">=5.3" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "debugpy" -version = "1.6.4" -description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "debugpy-1.6.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6ae238943482c78867ac707c09122688efb700372b617ffd364261e5e41f7a2f"}, - {file = "debugpy-1.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a39e7da178e1f22f4bc04b57f085e785ed1bcf424aaf318835a1a7129eefe35"}, - {file = "debugpy-1.6.4-cp310-cp310-win32.whl", hash = "sha256:143f79d0798a9acea21cd1d111badb789f19d414aec95fa6389cfea9485ddfb1"}, - {file = "debugpy-1.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:563f148f94434365ec0ce94739c749aabf60bf67339e68a9446499f3582d62f3"}, - {file = "debugpy-1.6.4-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1caee68f7e254267df908576c0d0938f8f88af16383f172cb9f0602e24c30c01"}, - {file = "debugpy-1.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e2a83d31a16b83666f19fa06d97b2cc311af88e6266590579737949971a17e"}, - {file = "debugpy-1.6.4-cp37-cp37m-win32.whl", hash = "sha256:82229790442856962aec4767b98ba2559fe0998f897e9f21fb10b4fd24b6c436"}, - {file = "debugpy-1.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:67edf033f9e512958f7b472975ff9d9b7ff64bf4440f6f6ae44afdc66b89e6b6"}, - {file = "debugpy-1.6.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:4ab5e938925e5d973f567d6ef32751b17d10f3be3a8c4d73c52f53e727f69bf1"}, - {file = "debugpy-1.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8df268e9f72fc06efc2e75e8dc8e2b881d6a397356faec26efb2ee70b6863b7"}, - {file = "debugpy-1.6.4-cp38-cp38-win32.whl", hash = "sha256:86bd25f38f8b6c5d430a5e2931eebbd5f580c640f4819fcd236d0498790c7204"}, - {file = "debugpy-1.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:62ba4179b372a62abf9c89b56997d70a4100c6dea6c2a4e0e4be5f45920b3253"}, - {file = "debugpy-1.6.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d2968e589bda4e485a9c61f113754a28e48d88c5152ed8e0b2564a1fadbe50a5"}, - {file = "debugpy-1.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e62b8034ede98932b92268669318848a0d42133d857087a3b9cec03bb844c615"}, - {file = "debugpy-1.6.4-cp39-cp39-win32.whl", hash = "sha256:3d9c31baf64bf959a593996c108e911c5a9aa1693a296840e5469473f064bcec"}, - {file = "debugpy-1.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:ea4bf208054e6d41749f17612066da861dff10102729d32c85b47f155223cf2b"}, - {file = "debugpy-1.6.4-py2.py3-none-any.whl", hash = 
"sha256:e886a1296cd20a10172e94788009ce74b759e54229ebd64a43fa5c2b4e62cd76"}, - {file = "debugpy-1.6.4.zip", hash = "sha256:d5ab9bd3f4e7faf3765fd52c7c43c074104ab1e109621dc73219099ed1a5399d"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] - -[[package]] -name = "executing" -version = "1.2.0" -description = "Get the currently executing AST node of a frame, and other information" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, -] - -[package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastjsonschema" -version = "2.16.2" -description = "Fastest Python implementation of JSON schema" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, - {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = 
"idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "importlib-metadata" -version = "5.2.0" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "importlib_metadata-5.2.0-py3-none-any.whl", hash = "sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f"}, - {file = "importlib_metadata-5.2.0.tar.gz", hash = "sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "ipykernel" -version = "6.19.4" -description = "IPython Kernel for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.19.4-py3-none-any.whl", hash = "sha256:0ecdae0060da61c5222ad221681f3b99b5bef739e11a3b1eb5778aa47f056f1f"}, - {file = "ipykernel-6.19.4.tar.gz", hash = "sha256:4140c282a6c71cdde59abe5eae2c71bf1eeb4a69316ab76e1c4c25150a49722b"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.0" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=17" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.7.0" -description = "IPython: Productive Interactive Computing" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, - {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.11,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = 
["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "jedi" -version = "0.18.2" -description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, -] - -[package.dependencies] -parso = ">=0.8.0,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "json5" -version = "0.9.10" -description = "A Python implementation of the JSON5 data format." 
-category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "json5-0.9.10-py2.py3-none-any.whl", hash = "sha256:993189671e7412e9cdd8be8dc61cf402e8e579b35f1d1bb20ae6b09baa78bbce"}, - {file = "json5-0.9.10.tar.gz", hash = "sha256:ad9f048c5b5a4c3802524474ce40a622fae789860a86f10cc4f7e5f9cf9b46ab"}, -] - -[package.extras] -dev = ["hypothesis"] - -[[package]] -name = "jsonpointer" -version = "2.3" -description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, -] - -[[package]] -name = "jsonschema" -version = "4.17.3" -description = "An implementation of JSON Schema validation for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jupyter-client" -version = "7.4.8" -description = "Jupyter protocol implementation and client libraries" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyter_client-7.4.8-py3-none-any.whl", hash = "sha256:d4a67ae86ee014bcb96bd8190714f6af921f2b0f52f4208b086aa5acfd9f8d65"}, - {file = "jupyter_client-7.4.8.tar.gz", hash = "sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a"}, -] - -[package.dependencies] -entrypoints = "*" -jupyter-core = ">=4.9.2" -nest-asyncio = ">=1.5.4" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = "*" - -[package.extras] -doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.1.1" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.1.1-py3-none-any.whl", hash = "sha256:f1038179d0f179b0e92c8fa2289c012b29dafdc9484b41821079f1a496f5a0f2"}, - {file = "jupyter_core-5.1.1.tar.gz", hash = "sha256:f342d29eb6edb06f8dffa69adea987b3a9ee2b6702338a8cb6911516ea0b432d"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.5.0" -description = "Jupyter Event System library" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyter_events-0.5.0-py3-none-any.whl", hash = "sha256:6f7b67bf42b8a370c992187194ed02847dfa02307a7aebe9913e2d3979b9b6b8"}, - {file = "jupyter_events-0.5.0.tar.gz", hash = "sha256:e27ffdd6138699d47d42cb65ae6d79334ff7c0d923694381c991ce56a140f2cd"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.3.0", extras = ["format-nongpl"]} -python-json-logger = "*" -pyyaml = "*" -traitlets = "*" - -[package.extras] -cli = ["click", "rich"] -test = ["click", "coverage", "pre-commit", "pytest (>=6.1.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] - -[[package]] -name = "jupyter-server" -version = "2.0.5" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.0.5-py3-none-any.whl", hash = "sha256:18a952c0b75c18f801e91221798fd3e91aef09562896f03877db39db332f7cd2"}, - {file = "jupyter_server-2.0.5.tar.gz", hash = "sha256:0d9e63d40412334dfa348cef8bfe8415b973752d12ca3afd64f0ea478e52aa4a"}, -] - -[package.dependencies] -anyio = ">=3.1.0,<4" -argon2-cffi = "*" -jinja2 = "*" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -jupyter-events = ">=0.4.0" -jupyter-server-terminals = "*" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -packaging = "*" -prometheus-client = "*" -pywinpty = {version = "*", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = "*" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = "*" - -[package.extras] -docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.4.3" -description = "A Jupyter Server Extension Providing Terminals." 
-category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.4.3-py3-none-any.whl", hash = "sha256:ec67d3f1895d25cfb586a87a50b8eee13b709898a4afd721058e551e0a0f480d"}, - {file = "jupyter_server_terminals-0.4.3.tar.gz", hash = "sha256:8421438d95a1f1f6994c48dd5dc10ad167ea7c196972bb5d1d7a9da1e30fde02"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "3.5.2" -description = "JupyterLab computational environment" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab-3.5.2-py3-none-any.whl", hash = "sha256:16e9b8320dcec469c70bb883e993e0bb84c4ea1a734063731f66922cf72add1b"}, - {file = "jupyterlab-3.5.2.tar.gz", hash = "sha256:10ac094215ffb872ddffbe2982bf1c039a79fecc326e191e7cc5efd84f331dad"}, -] - -[package.dependencies] -ipython = "*" -jinja2 = ">=2.1" -jupyter-core = "*" -jupyter-server = ">=1.16.0,<3" -jupyterlab-server = ">=2.10,<3.0" -nbclassic = "*" -notebook = "<7" -packaging = "*" -tomli = "*" -tornado = ">=6.1.0" - -[package.extras] -test = ["check-manifest", "coverage", "jupyterlab-server[test]", "pre-commit", "pytest (>=6.0)", "pytest-check-links (>=0.5)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.6.0)", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.2.2" -description = "Pygments theme using JupyterLab CSS variables" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.17.0" -description = "A set of server components for JupyterLab and JupyterLab like applications." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_server-2.17.0-py3-none-any.whl", hash = "sha256:9d74283291ec88b3e53b450a5007ac0fe531656bc219084e1ace91b546a90335"}, - {file = "jupyterlab_server-2.17.0.tar.gz", hash = "sha256:a5b6923e2a9ea299f98de5b50cde90722d74ef8f0713a75cbcbfbfd3f41ad058"}, -] - -[package.dependencies] -babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.17.3" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.28" - -[package.extras] -docs = ["autodoc-traits", "docutils (<0.20)", "jinja2 (<3.2.0)", "mistune (<3)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi"] -openapi = ["openapi-core (>=0.16.1)", "ruamel-yaml"] -test = ["codecov", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-validator (>=0.5.1)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6)", "pytest-timeout", "requests-mock", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mistune" -version = "2.0.4" -description = "A sane Markdown parser with useful plugins and renderers" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "mistune-2.0.4-py2.py3-none-any.whl", hash = "sha256:182cc5ee6f8ed1b807de6b7bb50155df7b66495412836b9a74c8fbdfc75fe36d"}, - {file = "mistune-2.0.4.tar.gz", hash = "sha256:9ee0a66053e2267aba772c71e06891fa8f1af6d4b01d5e84e267b4570d4d9808"}, -] - -[[package]] -name = "nbclassic" -version = "0.4.8" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "nbclassic-0.4.8-py3-none-any.whl", hash = "sha256:cbf05df5842b420d5cece0143462380ea9d308ff57c2dc0eb4d6e035b18fbfb3"}, - {file = "nbclassic-0.4.8.tar.gz", hash = "sha256:c74d8a500f8e058d46b576a41e5bc640711e1032cf7541dde5f73ea49497e283"}, -] - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=6.1.1" -jupyter-core = ">=4.6.1" -jupyter-server = ">=1.8" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -notebook-shim = ">=0.1.0" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] - -[[package]] -name = "nbclient" -version = "0.7.2" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-category = "dev" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, - {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -nbformat = ">=5.1" -traitlets = ">=5.3" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.2.7" -description = "Converting Jupyter Notebooks" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "nbconvert-7.2.7-py3-none-any.whl", hash = "sha256:e057f1f87a6ac50629b724d9a46b40e2ba394d6f20ee7f33f4acef1928a15af3"}, - {file = "nbconvert-7.2.7.tar.gz", hash = "sha256:8b727b0503bf4e0ff3907c8bea030d3fc4015fbee8669ac6ac2a5a6668b49d5e"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "*" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<3" -nbclient = ">=0.5.0" -nbformat = ">=5.1" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.0" - -[package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)"] -qtpdf = ["nbconvert[qtpng]"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] -webpdf = ["pyppeteer (>=1,<1.1)"] - -[[package]] -name = "nbformat" -version = "5.7.1" -description = "The Jupyter Notebook format" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "nbformat-5.7.1-py3-none-any.whl", hash = "sha256:e52ab802ce7f7a2863861e914642f021b9d7c23ad9726d14c36df92a79acd754"}, - {file = "nbformat-5.7.1.tar.gz", hash = "sha256:3810a0130453ed031970521d20989b8a592f3c2e73283a8280ae34ae1f75b3f8"}, -] - -[package.dependencies] -fastjsonschema = "*" -jsonschema = ">=2.6" -jupyter-core = "*" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.5.6" -description = "Patch asyncio to allow nested event loops" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, - {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, -] - -[[package]] -name = "notebook" -version = "6.5.2" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook-6.5.2-py3-none-any.whl", hash = "sha256:e04f9018ceb86e4fa841e92ea8fb214f8d23c1cedfde530cc96f92446924f0e4"}, - {file = "notebook-6.5.2.tar.gz", hash = 
"sha256:c1897e5317e225fc78b45549a6ab4b668e4c996fd03a04e938fe5e7af2bfffd0"}, -] - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=5.3.4" -jupyter-core = ">=4.6.1" -nbclassic = ">=0.4.7" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] - -[[package]] -name = "notebook-shim" -version = "0.2.2" -description = "A shim layer for notebook traits and config" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, - {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, -] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] - -[[package]] -name = "packaging" -version = "22.0" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, -] - -[[package]] -name = "pandocfilters" -version = "1.5.0" -description = "Utilities for writing pandoc filters in python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, -] - -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -name = "platformdirs" -version = "2.6.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, -] - -[package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - -[[package]] -name = "prometheus-client" -version = "0.15.0" -description = "Python client for the Prometheus monitoring system." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, - {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.36" -description = "Library for building powerful interactive command lines in Python" -category = "dev" -optional = false -python-versions = ">=3.6.2" -files = [ - {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, - {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.4" -description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = 
"py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyrsistent" -version = "0.19.2" -description = "Persistent/Functional/Immutable data structures" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, - {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, - {file = 
"pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, - {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, - {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, - {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, -] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-json-logger" -version = "2.0.4" -description = "A python library adding a json log formatter" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ - {file = "python-json-logger-2.0.4.tar.gz", hash = "sha256:764d762175f99fcc4630bd4853b09632acb60a6224acb27ce08cd70f0b1b81bd"}, - {file = "python_json_logger-2.0.4-py3-none-any.whl", hash = "sha256:3b03487b14eb9e4f77e4fc2a023358b5394b82fd89cecf5586259baed57d8c6f"}, -] - -[[package]] -name = "pytz" -version = "2022.7" -description = "World timezone definitions, modern and historical" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, - {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, -] - -[[package]] -name = "pywin32" -version = "305" -description = "Python for Window Extensions" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, - {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, - {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, - {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, - {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, - {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = 
"sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, - {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, - {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, - {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, - {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, - {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, - {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, - {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, - {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, -] - -[[package]] -name = "pywinpty" -version = "2.0.9" -description = "Pseudo terminal support for Windows from Python." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pywinpty-2.0.9-cp310-none-win_amd64.whl", hash = "sha256:30a7b371446a694a6ce5ef906d70ac04e569de5308c42a2bdc9c3bc9275ec51f"}, - {file = "pywinpty-2.0.9-cp311-none-win_amd64.whl", hash = "sha256:d78ef6f4bd7a6c6f94dc1a39ba8fb028540cc39f5cb593e756506db17843125f"}, - {file = "pywinpty-2.0.9-cp37-none-win_amd64.whl", hash = "sha256:5ed36aa087e35a3a183f833631b3e4c1ae92fe2faabfce0fa91b77ed3f0f1382"}, - {file = "pywinpty-2.0.9-cp38-none-win_amd64.whl", hash = "sha256:2352f44ee913faaec0a02d3c112595e56b8af7feeb8100efc6dc1a8685044199"}, - {file = "pywinpty-2.0.9-cp39-none-win_amd64.whl", hash = "sha256:ba75ec55f46c9e17db961d26485b033deb20758b1731e8e208e1e8a387fcf70c"}, - {file = "pywinpty-2.0.9.tar.gz", hash = "sha256:01b6400dd79212f50a2f01af1c65b781290ff39610853db99bf03962eb9a615f"}, -] - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = 
"PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = 
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] - -[[package]] -name = "pyzmq" -version = "24.0.1" -description = "Python bindings for 0MQ" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066"}, - {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6"}, - {file = "pyzmq-24.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae61446166983c663cee42c852ed63899e43e484abf080089f771df4b9d272ef"}, - {file = "pyzmq-24.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f7ac99b15270db8d53f28c3c7b968612993a90a5cf359da354efe96f5372b4"}, - {file = "pyzmq-24.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca7c3956b03b7663fac4d150f5e6d4f6f38b2462c1e9afd83bcf7019f17913"}, - {file = "pyzmq-24.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8c78bfe20d4c890cb5580a3b9290f700c570e167d4cdcc55feec07030297a5e3"}, - {file = "pyzmq-24.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:48f721f070726cd2a6e44f3c33f8ee4b24188e4b816e6dd8ba542c8c3bb5b246"}, - {file = "pyzmq-24.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afe1f3bc486d0ce40abb0a0c9adb39aed3bbac36ebdc596487b0cceba55c21c1"}, - {file = "pyzmq-24.0.1-cp310-cp310-win32.whl", hash = "sha256:3e6192dbcefaaa52ed81be88525a54a445f4b4fe2fffcae7fe40ebb58bd06bfd"}, - {file = "pyzmq-24.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:86de64468cad9c6d269f32a6390e210ca5ada568c7a55de8e681ca3b897bb340"}, - {file = "pyzmq-24.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:838812c65ed5f7c2bd11f7b098d2e5d01685a3f6d1f82849423b570bae698c00"}, - {file = "pyzmq-24.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfb992dbcd88d8254471760879d48fb20836d91baa90f181c957122f9592b3dc"}, - {file = "pyzmq-24.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7abddb2bd5489d30ffeb4b93a428130886c171b4d355ccd226e83254fcb6b9ef"}, - {file = 
"pyzmq-24.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94010bd61bc168c103a5b3b0f56ed3b616688192db7cd5b1d626e49f28ff51b3"}, - {file = "pyzmq-24.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8242543c522d84d033fe79be04cb559b80d7eb98ad81b137ff7e0a9020f00ace"}, - {file = "pyzmq-24.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ccb94342d13e3bf3ffa6e62f95b5e3f0bc6bfa94558cb37f4b3d09d6feb536ff"}, - {file = "pyzmq-24.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6640f83df0ae4ae1104d4c62b77e9ef39be85ebe53f636388707d532bee2b7b8"}, - {file = "pyzmq-24.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a180dbd5ea5d47c2d3b716d5c19cc3fb162d1c8db93b21a1295d69585bfddac1"}, - {file = "pyzmq-24.0.1-cp311-cp311-win32.whl", hash = "sha256:624321120f7e60336be8ec74a172ae7fba5c3ed5bf787cc85f7e9986c9e0ebc2"}, - {file = "pyzmq-24.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1724117bae69e091309ffb8255412c4651d3f6355560d9af312d547f6c5bc8b8"}, - {file = "pyzmq-24.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:15975747462ec49fdc863af906bab87c43b2491403ab37a6d88410635786b0f4"}, - {file = "pyzmq-24.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b947e264f0e77d30dcbccbb00f49f900b204b922eb0c3a9f0afd61aaa1cedc3d"}, - {file = "pyzmq-24.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ec91f1bad66f3ee8c6deb65fa1fe418e8ad803efedd69c35f3b5502f43bd1dc"}, - {file = "pyzmq-24.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:db03704b3506455d86ec72c3358a779e9b1d07b61220dfb43702b7b668edcd0d"}, - {file = "pyzmq-24.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e7e66b4e403c2836ac74f26c4b65d8ac0ca1eef41dfcac2d013b7482befaad83"}, - {file = "pyzmq-24.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7a23ccc1083c260fa9685c93e3b170baba45aeed4b524deb3f426b0c40c11639"}, - {file = "pyzmq-24.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fa0ae3275ef706c0309556061185dd0e4c4cd3b7d6f67ae617e4e677c7a41e2e"}, - {file = "pyzmq-24.0.1-cp36-cp36m-win32.whl", hash = "sha256:f01de4ec083daebf210531e2cca3bdb1608dbbbe00a9723e261d92087a1f6ebc"}, - {file = "pyzmq-24.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:de4217b9eb8b541cf2b7fde4401ce9d9a411cc0af85d410f9d6f4333f43640be"}, - {file = "pyzmq-24.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:78068e8678ca023594e4a0ab558905c1033b2d3e806a0ad9e3094e231e115a33"}, - {file = "pyzmq-24.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77c2713faf25a953c69cf0f723d1b7dd83827b0834e6c41e3fb3bbc6765914a1"}, - {file = "pyzmq-24.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bb4af15f305056e95ca1bd086239b9ebc6ad55e9f49076d27d80027f72752f6"}, - {file = "pyzmq-24.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0f14cffd32e9c4c73da66db97853a6aeceaac34acdc0fae9e5bbc9370281864c"}, - {file = "pyzmq-24.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0108358dab8c6b27ff6b985c2af4b12665c1bc659648284153ee501000f5c107"}, - {file = "pyzmq-24.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d66689e840e75221b0b290b0befa86f059fb35e1ee6443bce51516d4d61b6b99"}, - {file = "pyzmq-24.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae08ac90aa8fa14caafc7a6251bd218bf6dac518b7bff09caaa5e781119ba3f2"}, - {file = "pyzmq-24.0.1-cp37-cp37m-win32.whl", hash = "sha256:8421aa8c9b45ea608c205db9e1c0c855c7e54d0e9c2c2f337ce024f6843cab3b"}, - {file = 
"pyzmq-24.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54d8b9c5e288362ec8595c1d98666d36f2070fd0c2f76e2b3c60fbad9bd76227"}, - {file = "pyzmq-24.0.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:acbd0a6d61cc954b9f535daaa9ec26b0a60a0d4353c5f7c1438ebc88a359a47e"}, - {file = "pyzmq-24.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:47b11a729d61a47df56346283a4a800fa379ae6a85870d5a2e1e4956c828eedc"}, - {file = "pyzmq-24.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abe6eb10122f0d746a0d510c2039ae8edb27bc9af29f6d1b05a66cc2401353ff"}, - {file = "pyzmq-24.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:07bec1a1b22dacf718f2c0e71b49600bb6a31a88f06527dfd0b5aababe3fa3f7"}, - {file = "pyzmq-24.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d945a85b70da97ae86113faf9f1b9294efe66bd4a5d6f82f2676d567338b66"}, - {file = "pyzmq-24.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1b7928bb7580736ffac5baf814097be342ba08d3cfdfb48e52773ec959572287"}, - {file = "pyzmq-24.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b946da90dc2799bcafa682692c1d2139b2a96ec3c24fa9fc6f5b0da782675330"}, - {file = "pyzmq-24.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8840f064b1fb377cffd3efeaad2b190c14d4c8da02316dae07571252d20b31f"}, - {file = "pyzmq-24.0.1-cp38-cp38-win32.whl", hash = "sha256:4854f9edc5208f63f0841c0c667260ae8d6846cfa233c479e29fdc85d42ebd58"}, - {file = "pyzmq-24.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:42d4f97b9795a7aafa152a36fe2ad44549b83a743fd3e77011136def512e6c2a"}, - {file = "pyzmq-24.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:52afb0ac962963fff30cf1be775bc51ae083ef4c1e354266ab20e5382057dd62"}, - {file = "pyzmq-24.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bad8210ad4df68c44ff3685cca3cda448ee46e20d13edcff8909eba6ec01ca4"}, - {file = "pyzmq-24.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dabf1a05318d95b1537fd61d9330ef4313ea1216eea128a17615038859da3b3b"}, - {file = "pyzmq-24.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5bd3d7dfd9cd058eb68d9a905dec854f86649f64d4ddf21f3ec289341386c44b"}, - {file = "pyzmq-24.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8012bce6836d3f20a6c9599f81dfa945f433dab4dbd0c4917a6fb1f998ab33d"}, - {file = "pyzmq-24.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c31805d2c8ade9b11feca4674eee2b9cce1fec3e8ddb7bbdd961a09dc76a80ea"}, - {file = "pyzmq-24.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3104f4b084ad5d9c0cb87445cc8cfd96bba710bef4a66c2674910127044df209"}, - {file = "pyzmq-24.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:df0841f94928f8af9c7a1f0aaaffba1fb74607af023a152f59379c01c53aee58"}, - {file = "pyzmq-24.0.1-cp39-cp39-win32.whl", hash = "sha256:a435ef8a3bd95c8a2d316d6e0ff70d0db524f6037411652803e118871d703333"}, - {file = "pyzmq-24.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:2032d9cb994ce3b4cba2b8dfae08c7e25bc14ba484c770d4d3be33c27de8c45b"}, - {file = "pyzmq-24.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bb5635c851eef3a7a54becde6da99485eecf7d068bd885ac8e6d173c4ecd68b0"}, - {file = "pyzmq-24.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:83ea1a398f192957cb986d9206ce229efe0ee75e3c6635baff53ddf39bd718d5"}, - {file = "pyzmq-24.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:941fab0073f0a54dc33d1a0460cb04e0d85893cb0c5e1476c785000f8b359409"}, - {file = "pyzmq-24.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e8f482c44ccb5884bf3f638f29bea0f8dc68c97e38b2061769c4cb697f6140d"}, - {file = "pyzmq-24.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:613010b5d17906c4367609e6f52e9a2595e35d5cc27d36ff3f1b6fa6e954d944"}, - {file = "pyzmq-24.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:65c94410b5a8355cfcf12fd600a313efee46ce96a09e911ea92cf2acf6708804"}, - {file = "pyzmq-24.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:20e7eeb1166087db636c06cae04a1ef59298627f56fb17da10528ab52a14c87f"}, - {file = "pyzmq-24.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2712aee7b3834ace51738c15d9ee152cc5a98dc7d57dd93300461b792ab7b43"}, - {file = "pyzmq-24.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7c280185c4da99e0cc06c63bdf91f5b0b71deb70d8717f0ab870a43e376db8"}, - {file = "pyzmq-24.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:858375573c9225cc8e5b49bfac846a77b696b8d5e815711b8d4ba3141e6e8879"}, - {file = "pyzmq-24.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:80093b595921eed1a2cead546a683b9e2ae7f4a4592bb2ab22f70d30174f003a"}, - {file = "pyzmq-24.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f3f3154fde2b1ff3aa7b4f9326347ebc89c8ef425ca1db8f665175e6d3bd42f"}, - {file = "pyzmq-24.0.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb756147314430bee5d10919b8493c0ccb109ddb7f5dfd2fcd7441266a25b75"}, - {file = "pyzmq-24.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e706bac34e9f50779cb8c39f10b53a4d15aebb97235643d3112ac20bd577b4"}, - {file = "pyzmq-24.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:687700f8371643916a1d2c61f3fdaa630407dd205c38afff936545d7b7466066"}, - {file = "pyzmq-24.0.1.tar.gz", hash = "sha256:216f5d7dbb67166759e59b0479bca82b8acf9bed6015b526b8eb10143fb08e77"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "dev" -optional = false -python-versions = ">=3.7, <4" -files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "schematic-client" -version = "0.1.0" -description = "Schematic REST API client" -category = "main" -optional = false -python-versions = "3.9.2" -files = [] -develop = false - -[package.dependencies] -python-dateutil = "2.8.2" -urllib3 = "1.26.13" - -[package.source] -type = "directory" -url = "../../../libs/schematic/api-client-python" - -[[package]] -name = "send2trash" -version = "1.8.0" -description = "Send file to trash natively under Mac OS X, Windows and Linux." 
-category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, - {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] - -[[package]] -name = "stack-data" -version = "0.6.2" -description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "terminado" -version = "0.17.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, - {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] - -[[package]] -name = "tinycss2" -version = "1.2.1" -description = "A tiny CSS parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "dev" -optional = false -python-versions = ">= 3.7" -files = [ - {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, - {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, - {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, - {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, - {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, -] - -[[package]] -name = "traitlets" -version = 
"5.8.0" -description = "Traitlets Python configuration system" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "traitlets-5.8.0-py3-none-any.whl", hash = "sha256:c864831efa0ba6576d09b44884b34e41defc18c0d7e720b4a2d6698c842cab3e"}, - {file = "traitlets-5.8.0.tar.gz", hash = "sha256:6cc57d6dc28c85d5365961726ffd19b538739347749e13ebe34e03323a0e8f84"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] - -[[package]] -name = "uri-template" -version = "1.2.0" -description = "RFC 6570 URI Template Processor" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, - {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, -] - -[package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] - -[[package]] -name = "webcolors" -version = "1.12" -description = "A library for working with color names and color values formats defined by HTML and CSS." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, - {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, -] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.4.2" -description = "WebSocket client for Python with low level API options" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "websocket-client-1.4.2.tar.gz", hash = "sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"}, - {file = "websocket_client-1.4.2-py3-none-any.whl", hash = "sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574"}, -] - -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "zipp" -version = "3.11.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[metadata] -lock-version = "2.0" -python-versions = "3.9.2" -content-hash = "5858f6427daa0934105273af841cefcd84627ec3b47e423e3e1dd9e154a7c5d3" diff --git a/apps/schematic/notebook/prepare-python.sh b/apps/schematic/notebook/prepare-python.sh deleted file mode 100755 index 3c3a68a53..000000000 --- a/apps/schematic/notebook/prepare-python.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -pyenv install --skip-existing 3.9.2 -pyenv local 3.9.2 -poetry env use 3.9.2 -poetry run pip install "cython<3.0.0" -poetry run pip install --no-build-isolation pyyaml==5.4.1 -poetry install \ No newline at end of file diff --git a/apps/schematic/notebook/project.json b/apps/schematic/notebook/project.json deleted file mode 100644 index e55231070..000000000 --- a/apps/schematic/notebook/project.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "schematic-notebook", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "apps/schematic/notebook/src", - "projectType": "application", - "targets": { - "create-config": { - "executor": "nx:run-commands", - "options": { - "command": "cp -n .env.example .env", - "cwd": "{projectRoot}" - } - }, - "prepare": { - "executor": "nx:run-commands", - "options": { - "command": "./prepare-python.sh", - "cwd": "{projectRoot}" - } - }, - "serve": { - "executor": 
"nx:run-commands", - "options": { - "commands": ["poetry run jupyter lab --config=jupyter_lab_config.py"], - "cwd": "apps/schematic/notebook" - }, - "dependsOn": [] - } - }, - "tags": ["type:app", "scope:client"], - "implicitDependencies": ["schematic-api-client-python"] -} diff --git a/apps/schematic/notebook/pyproject.toml b/apps/schematic/notebook/pyproject.toml deleted file mode 100644 index 54af30751..000000000 --- a/apps/schematic/notebook/pyproject.toml +++ /dev/null @@ -1,19 +0,0 @@ -[tool.poetry] -name = "schematic-notebook" -version = "0.1.0" -description = "Schematic notebooks" -authors = ["Thomas Schaffter "] -readme = "README.md" -packages = [] - -[tool.poetry.dependencies] -python = "3.9.2" -schematic-client = {path = "../../../libs/schematic/api-client-python"} - - -[tool.poetry.group.dev.dependencies] -jupyterlab = "3.5.2" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/dev-env.sh b/dev-env.sh index b1d0a7db2..680099167 100644 --- a/dev-env.sh +++ b/dev-env.sh @@ -105,10 +105,6 @@ function openchallenges-build-images { nx run-many --target=build-image --projects=openchallenges-* --parallel=3 } -function schematic-build-images { - nx run-many --target=build-image --projects=schematic-* --parallel=3 -} - function iatlas-build-images { nx run-many --target=build-image --projects=iatlas-* --parallel=3 } diff --git a/docker/schematic/networks.yml b/docker/schematic/networks.yml deleted file mode 100644 index 82aa8e571..000000000 --- a/docker/schematic/networks.yml +++ /dev/null @@ -1,4 +0,0 @@ -networks: - schematic: - name: schematic - driver: bridge diff --git a/docker/schematic/serve-detach.sh b/docker/schematic/serve-detach.sh deleted file mode 100755 index af38acb02..000000000 --- a/docker/schematic/serve-detach.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -args=( - # List of services in alphanumeric order - --file docker/schematic/services/api-docs.yml - --file docker/schematic/services/api.yml - - --file docker/schematic/networks.yml - --file docker/schematic/volumes.yml - - up $1 --detach --remove-orphans -) - -docker compose "${args[@]}" diff --git a/docker/schematic/services/api-docs.yml b/docker/schematic/services/api-docs.yml deleted file mode 100644 index 3b88faa4b..000000000 --- a/docker/schematic/services/api-docs.yml +++ /dev/null @@ -1,15 +0,0 @@ -services: - schematic-api-docs: - image: ghcr.io/sage-bionetworks/schematic-api-docs:local - container_name: schematic-api-docs - restart: always - env_file: - - ../../../apps/schematic/api-docs/.env - networks: - - schematic - ports: - - '7010:7010' - deploy: - resources: - limits: - memory: 200M diff --git a/docker/schematic/services/api.yml b/docker/schematic/services/api.yml deleted file mode 100644 index c2be6ca53..000000000 --- a/docker/schematic/services/api.yml +++ /dev/null @@ -1,18 +0,0 @@ -services: - schematic-api: - image: ghcr.io/sage-bionetworks/schematic-api:${SCHEMATIC_VERSION:-local} - container_name: schematic-api - restart: always - env_file: - - ../../../apps/schematic/api/.env - networks: - - schematic - ports: - - '7443:7443' - # depends_on: - # openchallenges-config-server: - # condition: service_healthy - deploy: - resources: - limits: - memory: 1G diff --git a/docker/schematic/volumes.yml b/docker/schematic/volumes.yml deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/_archive/index.md b/docs/_archive/index.md index b7726a0b4..fd0343844 100644 --- a/docs/_archive/index.md +++ b/docs/_archive/index.md @@ -43,7 
+43,6 @@ Then open your fork repo inside our dev container using these instructions: ## Projects - OpenChallenges -- Schematic (evaluation) - Synapse (evaluation) --- diff --git a/docs/index.md b/docs/index.md index 8dfdc95c1..45a0ff15e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -26,9 +26,8 @@ increased morale, productivity, retention, and knowledge transfer. ## Current Projects -| Name | Description | -| ---------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Agora](https://agora.adknowledgeportal.org/genes) | Agora hosts high-dimensional human transcriptomic, proteomic, and metabolomic evidence for whether or not genes are associated with Alzheimer’s disease (AD). | -| [iAtlas](https://isb-cgc.shinyapps.io/iatlas/) | The iAtlas portal serves as an interactive tool for exploring and analyzing immuno-oncology data. | -| [OpenChallenges](https://openchallenges.io/home) | OpenChallenges (OC) aggregates biomedical challenges to accelerate citizen science and data benchmarking | -| [Schematic](https://github.com/Sage-Bionetworks/schematic) | SCHEMATIC (Schema Engine for Manifest Ingress and Curation) is a novel schema-based, metadata ingress ecosystem, meant to streamline the process of biomedical dataset annotation, metadata validation and submission to a data repository for various data contributors. | +| Name | Description | +| -------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| [Agora](https://agora.adknowledgeportal.org/genes) | Agora hosts high-dimensional human transcriptomic, proteomic, and metabolomic evidence for whether or not genes are associated with Alzheimer’s disease (AD). | +| [iAtlas](https://isb-cgc.shinyapps.io/iatlas/) | The iAtlas portal serves as an interactive tool for exploring and analyzing immuno-oncology data. | +| [OpenChallenges](https://openchallenges.io/home) | OpenChallenges (OC) aggregates biomedical challenges to accelerate citizen science and data benchmarking | diff --git a/libs/schematic/api-client-python/.gitignore b/libs/schematic/api-client-python/.gitignore deleted file mode 100644 index 43995bd42..000000000 --- a/libs/schematic/api-client-python/.gitignore +++ /dev/null @@ -1,66 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ -venv/ -.venv/ -.python-version -.pytest_cache - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -#Ipython Notebook -.ipynb_checkpoints diff --git a/libs/schematic/api-client-python/.openapi-generator-ignore b/libs/schematic/api-client-python/.openapi-generator-ignore deleted file mode 100644 index b1733f9c7..000000000 --- a/libs/schematic/api-client-python/.openapi-generator-ignore +++ /dev/null @@ -1,29 +0,0 @@ -# OpenAPI Generator Ignore -# Generated by openapi-generator https://github.com/openapitools/openapi-generator - -# Use this file to prevent files from being overwritten by the generator. -# The patterns follow closely to .gitignore or .dockerignore. - -# As an example, the C# client generator defines ApiClient.cs. -# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: -#ApiClient.cs - -# You can match any string of characters against a directory, file or extension with a single asterisk (*): -#foo/*/qux -# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux - -# You can recursively match patterns against a directory, file or extension with a double asterisk (**): -#foo/**/qux -# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux - -# You can also negate patterns with an exclamation (!). -# For example, you can ignore all files in a docs folder with the file extension .md: -#docs/*.md -# Then explicitly reverse the ignore rule for a single file: -#!docs/README.md -.gitignore -.gitlab-ci.yml -.travis.yml -git_push.sh -README.md -setup.cfg \ No newline at end of file diff --git a/libs/schematic/api-client-python/.openapi-generator/FILES b/libs/schematic/api-client-python/.openapi-generator/FILES deleted file mode 100644 index a6d95e850..000000000 --- a/libs/schematic/api-client-python/.openapi-generator/FILES +++ /dev/null @@ -1,27 +0,0 @@ -docs/BasicError.md -docs/Dataset.md -docs/DatasetsPage.md -docs/DatasetsPageAllOf.md -docs/PageMetadata.md -docs/StorageApi.md -requirements.txt -schematic_client/__init__.py -schematic_client/api/__init__.py -schematic_client/api/storage_api.py -schematic_client/api_client.py -schematic_client/apis/__init__.py -schematic_client/configuration.py -schematic_client/exceptions.py -schematic_client/model/__init__.py -schematic_client/model/basic_error.py -schematic_client/model/dataset.py -schematic_client/model/datasets_page.py -schematic_client/model/datasets_page_all_of.py -schematic_client/model/page_metadata.py -schematic_client/model_utils.py -schematic_client/models/__init__.py -schematic_client/rest.py -setup.py -test-requirements.txt -test/__init__.py -tox.ini diff --git a/libs/schematic/api-client-python/.openapi-generator/VERSION b/libs/schematic/api-client-python/.openapi-generator/VERSION deleted file mode 100644 index 358e78e60..000000000 --- a/libs/schematic/api-client-python/.openapi-generator/VERSION +++ /dev/null @@ -1 +0,0 @@ -6.1.0 \ No newline at end of file diff --git a/libs/schematic/api-client-python/README.md b/libs/schematic/api-client-python/README.md deleted file mode 100644 index 4d8e5779c..000000000 --- a/libs/schematic/api-client-python/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# openapi-client - -No description provided (generated by Openapi 
Generator https://github.com/openapitools/openapi-generator) - -This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: - -- API version: 0.1.0 -- Package version: 1.0.0 -- Build package: org.openapitools.codegen.languages.PythonClientCodegen - For more information, please visit [https://github.com/Sage-Bionetworks/sage-monorepo](https://github.com/Sage-Bionetworks/sage-monorepo) - -## Requirements. - -Python >=3.6 - -## Installation & Usage - -### pip install - -If the python package is hosted on a repository, you can install directly using: - -```sh -pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git -``` - -(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) - -Then import the package: - -```python -import openapi_client -``` - -### Setuptools - -Install via [Setuptools](http://pypi.python.org/pypi/setuptools). - -```sh -python setup.py install --user -``` - -(or `sudo python setup.py install` to install the package for all users) - -Then import the package: - -```python -import openapi_client -``` - -## Getting Started - -Please follow the [installation procedure](#installation--usage) and then run the following: - -```python - -import time -import openapi_client -from pprint import pprint -from openapi_client.api import storage_api -from openapi_client.model.basic_error import BasicError -from openapi_client.model.datasets_page import DatasetsPage -# Defining the host is optional and defaults to http://localhost/api/v1 -# See configuration.py for a list of all supported configuration parameters. -configuration = openapi_client.Configuration( - host = "http://localhost/api/v1" -) - - - -# Enter a context with an instance of the API client -with openapi_client.ApiClient(configuration) as api_client: - # Create an instance of the API class - api_instance = storage_api.StorageApi(api_client) - project_id = "syn26251192" # str | The Synapse ID of a storage project. - - try: - # Gets all datasets in folder under a given storage project that the current user has access to. - api_response = api_instance.list_storage_project_datasets(project_id) - pprint(api_response) - except openapi_client.ApiException as e: - print("Exception when calling StorageApi->list_storage_project_datasets: %s\n" % e) -``` - -## Documentation for API Endpoints - -All URIs are relative to _http://localhost/api/v1_ - -| Class | Method | HTTP request | Description | -| ------------ | ------------------------------------------------------------------------------------- | ----------------------------------------------- | ---------------------------------------------------------------------------------------------- | -| _StorageApi_ | [**list_storage_project_datasets**](docs/StorageApi.md#list_storage_project_datasets) | **GET** /storages/projects/{projectId}/datasets | Gets all datasets in folder under a given storage project that the current user has access to. | - -## Documentation For Models - -- [BasicError](docs/BasicError.md) -- [Dataset](docs/Dataset.md) -- [DatasetsPage](docs/DatasetsPage.md) -- [DatasetsPageAllOf](docs/DatasetsPageAllOf.md) -- [PageMetadata](docs/PageMetadata.md) - -## Documentation For Authorization - -All endpoints do not require authorization. 
- -## Author - -## Notes for Large OpenAPI documents - -If the OpenAPI document is large, imports in openapi_client.apis and openapi_client.models may fail with a -RecursionError indicating the maximum recursion limit has been exceeded. In that case, there are a couple of solutions: - -Solution 1: -Use specific imports for apis and models like: - -- `from openapi_client.api.default_api import DefaultApi` -- `from openapi_client.model.pet import Pet` - -Solution 2: -Before importing the package, adjust the maximum recursion limit as shown below: - -``` -import sys -sys.setrecursionlimit(1500) -import openapi_client -from openapi_client.apis import * -from openapi_client.models import * -``` diff --git a/libs/schematic/api-client-python/docs/BasicError.md b/libs/schematic/api-client-python/docs/BasicError.md deleted file mode 100644 index 00375b117..000000000 --- a/libs/schematic/api-client-python/docs/BasicError.md +++ /dev/null @@ -1,15 +0,0 @@ -# BasicError - -Problem details (tools.ietf.org/html/rfc7807) - -## Properties - -| Name | Type | Description | Notes | -| ------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------- | ---------- | -| **title** | **str** | A human readable documentation for the problem type | -| **status** | **int** | The HTTP status code | -| **detail** | **str** | A human readable explanation specific to this occurrence of the problem | [optional] | -| **type** | **str** | An absolute URI that identifies the problem type | [optional] | -| **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] | - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/docs/Dataset.md b/libs/schematic/api-client-python/docs/Dataset.md deleted file mode 100644 index bea11ddb1..000000000 --- a/libs/schematic/api-client-python/docs/Dataset.md +++ /dev/null @@ -1,12 +0,0 @@ -# Dataset - -A dataset. - -## Properties - -| Name | Type | Description | Notes | -| ------------------- | ---------------------------------------------------------------- | ------------------------------------------------------------------ | ---------- | -| **name** | **str** | The name of the dataset. | -| **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] | - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/docs/DatasetsPage.md b/libs/schematic/api-client-python/docs/DatasetsPage.md deleted file mode 100644 index c44109263..000000000 --- a/libs/schematic/api-client-python/docs/DatasetsPage.md +++ /dev/null @@ -1,18 +0,0 @@ -# DatasetsPage - -A page of datasets. - -## Properties - -| Name | Type | Description | Notes | -| ------------------- | ---------------------------------------------------------------- | ------------------------------------------------------------------ | ---------- | -| **number** | **int** | The page number. | -| **size** | **int** | The number of items in a single page. | -| **total_elements** | **int** | Total number of elements in the result set. 
| -| **total_pages** | **int** | Total number of pages in the result set. | -| **has_next** | **bool** | Returns if there is a next page. | -| **has_previous** | **bool** | Returns if there is a previous page. | -| **datasets** | [**[Dataset]**](Dataset.md) | A list of datasets. | -| **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] | - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/docs/DatasetsPageAllOf.md b/libs/schematic/api-client-python/docs/DatasetsPageAllOf.md deleted file mode 100644 index 35a9617fd..000000000 --- a/libs/schematic/api-client-python/docs/DatasetsPageAllOf.md +++ /dev/null @@ -1,10 +0,0 @@ -# DatasetsPageAllOf - -## Properties - -| Name | Type | Description | Notes | -| ------------------- | ---------------------------------------------------------------- | ------------------------------------------------------------------ | ---------- | -| **datasets** | [**[Dataset]**](Dataset.md) | A list of datasets. | -| **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] | - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/docs/PageMetadata.md b/libs/schematic/api-client-python/docs/PageMetadata.md deleted file mode 100644 index f4ae292b7..000000000 --- a/libs/schematic/api-client-python/docs/PageMetadata.md +++ /dev/null @@ -1,17 +0,0 @@ -# PageMetadata - -The metadata of a page. - -## Properties - -| Name | Type | Description | Notes | -| ------------------- | ---------------------------------------------------------------- | ------------------------------------------------------------------ | ---------- | -| **number** | **int** | The page number. | -| **size** | **int** | The number of items in a single page. | -| **total_elements** | **int** | Total number of elements in the result set. | -| **total_pages** | **int** | Total number of pages in the result set. | -| **has_next** | **bool** | Returns if there is a next page. | -| **has_previous** | **bool** | Returns if there is a previous page. 
| -| **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] | - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/docs/StorageApi.md b/libs/schematic/api-client-python/docs/StorageApi.md deleted file mode 100644 index f4c393b0b..000000000 --- a/libs/schematic/api-client-python/docs/StorageApi.md +++ /dev/null @@ -1,75 +0,0 @@ -# schematic_client.StorageApi - -All URIs are relative to _http://localhost/api/v1_ - -| Method | HTTP request | Description | -| -------------------------------------------------------------------------------- | ----------------------------------------------- | ---------------------------------------------------------------------------------------------- | -| [**list_storage_project_datasets**](StorageApi.md#list_storage_project_datasets) | **GET** /storages/projects/{projectId}/datasets | Gets all datasets in folder under a given storage project that the current user has access to. | - -# **list_storage_project_datasets** - -> DatasetsPage list_storage_project_datasets(project_id) - -Gets all datasets in folder under a given storage project that the current user has access to. - -Gets all datasets in folder under a given storage project that the current user has access to. - -### Example - -```python -import time -import schematic_client -from schematic_client.api import storage_api -from schematic_client.model.basic_error import BasicError -from schematic_client.model.datasets_page import DatasetsPage -from pprint import pprint -# Defining the host is optional and defaults to http://localhost/api/v1 -# See configuration.py for a list of all supported configuration parameters. -configuration = schematic_client.Configuration( - host = "http://localhost/api/v1" -) - - -# Enter a context with an instance of the API client -with schematic_client.ApiClient() as api_client: - # Create an instance of the API class - api_instance = storage_api.StorageApi(api_client) - project_id = "syn26251192" # str | The Synapse ID of a storage project. - - # example passing only required values which don't have defaults set - try: - # Gets all datasets in folder under a given storage project that the current user has access to. - api_response = api_instance.list_storage_project_datasets(project_id) - pprint(api_response) - except schematic_client.ApiException as e: - print("Exception when calling StorageApi->list_storage_project_datasets: %s\n" % e) -``` - -### Parameters - -| Name | Type | Description | Notes | -| -------------- | ------- | ------------------------------------ | ----- | -| **project_id** | **str** | The Synapse ID of a storage project. 
| - -### Return type - -[**DatasetsPage**](DatasetsPage.md) - -### Authorization - -No authorization required - -### HTTP request headers - -- **Content-Type**: Not defined -- **Accept**: application/json, application/problem+json - -### HTTP response details - -| Status code | Description | Response headers | -| ----------- | ----------------------------------------------------------------- | ---------------- | -| **200** | Success | - | -| **404** | The specified resource was not found | - | -| **500** | The request cannot be fulfilled due to an unexpected server error | - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) diff --git a/libs/schematic/api-client-python/openapitools.json b/libs/schematic/api-client-python/openapitools.json deleted file mode 100644 index d0d41860f..000000000 --- a/libs/schematic/api-client-python/openapitools.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "$schema": "../../../node_modules/@openapitools/openapi-generator-cli/config.schema.json", - "spaces": 2, - "generator-cli": { - "version": "6.1.0", - "generators": { - "api-client-python": { - "generatorName": "python", - "inputSpec": "#{cwd}/../api-description/build/openapi.yaml", - "output": "#{cwd}/", - "additionalProperties": { - "packageName": "schematic_client" - } - } - } - } -} diff --git a/libs/schematic/api-client-python/poetry.lock b/libs/schematic/api-client-python/poetry.lock deleted file mode 100644 index 46fb5c44e..000000000 --- a/libs/schematic/api-client-python/poetry.lock +++ /dev/null @@ -1,258 +0,0 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. - -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.0.1" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3695c4f4750bca943b3e1f74ad4be8d29e4aeab927d50772c41359107bd5d5c"}, - {file = "coverage-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa6a5a224b7f4cfb226f4fc55a57e8537fcc096f42219128c2c74c0e7d0953e1"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74f70cd92669394eaf8d7756d1b195c8032cf7bbbdfce3bc489d4e15b3b8cf73"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b66bb21a23680dee0be66557dc6b02a3152ddb55edf9f6723fa4a93368f7158d"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87717959d4d0ee9db08a0f1d80d21eb585aafe30f9b0a54ecf779a69cb015f6"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:854f22fa361d1ff914c7efa347398374cc7d567bdafa48ac3aa22334650dfba2"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e414dc32ee5c3f36544ea466b6f52f28a7af788653744b8570d0bf12ff34bc0"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6c5ad996c6fa4d8ed669cfa1e8551348729d008a2caf81489ab9ea67cfbc7498"}, - {file = "coverage-7.0.1-cp310-cp310-win32.whl", hash = "sha256:691571f31ace1837838b7e421d3a09a8c00b4aac32efacb4fc9bd0a5c647d25a"}, - {file = "coverage-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:89caf4425fe88889e2973a8e9a3f6f5f9bbe5dd411d7d521e86428c08a873a4a"}, - {file = "coverage-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:63d56165a7c76265468d7e0c5548215a5ba515fc2cba5232d17df97bffa10f6c"}, - {file = "coverage-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f943a3b2bc520102dd3e0bb465e1286e12c9a54f58accd71b9e65324d9c7c01"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:830525361249dc4cd013652b0efad645a385707a5ae49350c894b67d23fbb07c"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1b9c5adc066db699ccf7fa839189a649afcdd9e02cb5dc9d24e67e7922737d"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c14720b8b3b6c23b487e70bd406abafc976ddc50490f645166f111c419c39"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d55d840e1b8c0002fce66443e124e8581f30f9ead2e54fbf6709fb593181f2c"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66b18c3cf8bbab0cce0d7b9e4262dc830e93588986865a8c78ab2ae324b3ed56"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12a5aa77783d49e05439fbe6e6b427484f8a0f9f456b46a51d8aac022cfd024d"}, - {file = "coverage-7.0.1-cp311-cp311-win32.whl", hash = 
"sha256:b77015d1cb8fe941be1222a5a8b4e3fbca88180cfa7e2d4a4e58aeabadef0ab7"}, - {file = "coverage-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb992c47cb1e5bd6a01e97182400bcc2ba2077080a17fcd7be23aaa6e572e390"}, - {file = "coverage-7.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e78e9dcbf4f3853d3ae18a8f9272111242531535ec9e1009fa8ec4a2b74557dc"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60bef2e2416f15fdc05772bf87db06c6a6f9870d1db08fdd019fbec98ae24a9"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9823e4789ab70f3ec88724bba1a203f2856331986cd893dedbe3e23a6cfc1e4e"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9158f8fb06747ac17bd237930c4372336edc85b6e13bdc778e60f9d685c3ca37"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:486ee81fa694b4b796fc5617e376326a088f7b9729c74d9defa211813f3861e4"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1285648428a6101b5f41a18991c84f1c3959cee359e51b8375c5882fc364a13f"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2c44fcfb3781b41409d0f060a4ed748537557de9362a8a9282182fafb7a76ab4"}, - {file = "coverage-7.0.1-cp37-cp37m-win32.whl", hash = "sha256:d6814854c02cbcd9c873c0f3286a02e3ac1250625cca822ca6bc1018c5b19f1c"}, - {file = "coverage-7.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66460f17c9319ea4f91c165d46840314f0a7c004720b20be58594d162a441d8"}, - {file = "coverage-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b373c9345c584bb4b5f5b8840df7f4ab48c4cbb7934b58d52c57020d911b856"}, - {file = "coverage-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d3022c3007d3267a880b5adcf18c2a9bf1fc64469b394a804886b401959b8742"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92651580bd46519067e36493acb394ea0607b55b45bd81dd4e26379ed1871f55"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cfc595d2af13856505631be072835c59f1acf30028d1c860b435c5fc9c15b69"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b4b3a4d9915b2be879aff6299c0a6129f3d08a775d5a061f503cf79571f73e4"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6f22bb64cc39bcb883e5910f99a27b200fdc14cdd79df8696fa96b0005c9444"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72d1507f152abacea81f65fee38e4ef3ac3c02ff8bc16f21d935fd3a8a4ad910"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a79137fc99815fff6a852c233628e735ec15903cfd16da0f229d9c4d45926ab"}, - {file = "coverage-7.0.1-cp38-cp38-win32.whl", hash = "sha256:b3763e7fcade2ff6c8e62340af9277f54336920489ceb6a8cd6cc96da52fcc62"}, - {file = "coverage-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:09f6b5a8415b6b3e136d5fec62b552972187265cb705097bf030eb9d4ffb9b60"}, - {file = "coverage-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:978258fec36c154b5e250d356c59af7d4c3ba02bef4b99cda90b6029441d797d"}, - {file = "coverage-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19ec666533f0f70a0993f88b8273057b96c07b9d26457b41863ccd021a043b9a"}, - {file = 
"coverage-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfded268092a84605f1cc19e5c737f9ce630a8900a3589e9289622db161967e9"}, - {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bcfb1d8ac94af886b54e18a88b393f6a73d5959bb31e46644a02453c36e475"}, - {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b4a923cc7566bbc7ae2dfd0ba5a039b61d19c740f1373791f2ebd11caea59"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aec2d1515d9d39ff270059fd3afbb3b44e6ec5758af73caf18991807138c7118"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c20cfebcc149a4c212f6491a5f9ff56f41829cd4f607b5be71bb2d530ef243b1"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd556ff16a57a070ce4f31c635953cc44e25244f91a0378c6e9bdfd40fdb249f"}, - {file = "coverage-7.0.1-cp39-cp39-win32.whl", hash = "sha256:b9ea158775c7c2d3e54530a92da79496fb3fb577c876eec761c23e028f1e216c"}, - {file = "coverage-7.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:d1991f1dd95eba69d2cd7708ff6c2bbd2426160ffc73c2b81f617a053ebcb1a8"}, - {file = "coverage-7.0.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:3dd4ee135e08037f458425b8842d24a95a0961831a33f89685ff86b77d378f89"}, - {file = "coverage-7.0.1.tar.gz", hash = "sha256:a4a574a19eeb67575a5328a5760bbbb737faa685616586a9f9da4281f940109c"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "exceptiongroup" -version = "1.1.0" -description = "Backport of PEP 654 (exception groups)" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, - {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] - -[[package]] -name = "packaging" -version = "22.0" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, -] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] - -[package.extras] -dev = ["pre-commit", 
"tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "4.0.0" -description = "Pytest plugin for measuring coverage." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[metadata] -lock-version = "2.0" -python-versions = "3.9.2" -content-hash = "6f1997fc0b52d9775a5b1ae7fac1c2c0867f82525a9f024064f0900e5013b0fa" diff --git a/libs/schematic/api-client-python/prepare-python.sh b/libs/schematic/api-client-python/prepare-python.sh deleted file mode 100755 index 3c3a68a53..000000000 --- a/libs/schematic/api-client-python/prepare-python.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -pyenv install --skip-existing 3.9.2 -pyenv local 3.9.2 -poetry env use 3.9.2 -poetry run pip install "cython<3.0.0" -poetry run pip install --no-build-isolation pyyaml==5.4.1 -poetry install \ No newline at end of file diff --git a/libs/schematic/api-client-python/project.json b/libs/schematic/api-client-python/project.json deleted file mode 100644 index 7748c003f..000000000 --- a/libs/schematic/api-client-python/project.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "schematic-api-client-python", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "libs/schematic/api-client-python/src", - "projectType": "library", - "prefix": "schematic", - "targets": { - "prepare": { - "executor": "nx:run-commands", - "options": { - "command": "./prepare-python.sh", - "cwd": "{projectRoot}" - } - }, - "generate": { - "executor": "nx:run-commands", - "options": { - "commands": [ - "rm -fr src/*", - "openapi-generator-cli generate", - "echo 'TODO Format generated code'" - ], - "cwd": "{projectRoot}", - "parallel": false - }, - "dependsOn": ["^build"] - } - }, - "tags": ["language:python", "package-manager:poetry"], - "implicitDependencies": ["schematic-api-description"] -} diff --git a/libs/schematic/api-client-python/pyproject.toml b/libs/schematic/api-client-python/pyproject.toml deleted file mode 100644 index 215d52348..000000000 --- a/libs/schematic/api-client-python/pyproject.toml +++ /dev/null @@ -1,20 +0,0 @@ -[tool.poetry] -name = "schematic-client" -version = "0.1.0" -description = "Schematic REST API client" -authors = ["Thomas Schaffter "] -readme = "README.md" -packages = [{include = "schematic_client"}] - -[tool.poetry.dependencies] -python = "3.9.2" -python-dateutil = "2.8.2" -urllib3 = "1.26.13" - - -[tool.poetry.group.test.dependencies] -pytest-cov = "4.0.0" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/libs/schematic/api-client-python/schematic_client/__init__.py b/libs/schematic/api-client-python/schematic_client/__init__.py deleted file mode 100644 index 74e890e3f..000000000 --- a/libs/schematic/api-client-python/schematic_client/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# flake8: noqa - -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - - 
-__version__ = "1.0.0" - -# import ApiClient -from schematic_client.api_client import ApiClient - -# import Configuration -from schematic_client.configuration import Configuration - -# import exceptions -from schematic_client.exceptions import OpenApiException -from schematic_client.exceptions import ApiAttributeError -from schematic_client.exceptions import ApiTypeError -from schematic_client.exceptions import ApiValueError -from schematic_client.exceptions import ApiKeyError -from schematic_client.exceptions import ApiException diff --git a/libs/schematic/api-client-python/schematic_client/api/__init__.py b/libs/schematic/api-client-python/schematic_client/api/__init__.py deleted file mode 100644 index 5c573b9e4..000000000 --- a/libs/schematic/api-client-python/schematic_client/api/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# do not import all apis into this module because that uses a lot of memory and stack frames -# if you need the ability to import all apis from one package, import them with -# from schematic_client.apis import StorageApi diff --git a/libs/schematic/api-client-python/schematic_client/api/storage_api.py b/libs/schematic/api-client-python/schematic_client/api/storage_api.py deleted file mode 100644 index 686bcbad7..000000000 --- a/libs/schematic/api-client-python/schematic_client/api/storage_api.py +++ /dev/null @@ -1,140 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.api_client import ApiClient, Endpoint as _Endpoint -from schematic_client.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types, -) -from schematic_client.model.basic_error import BasicError -from schematic_client.model.datasets_page import DatasetsPage - - -class StorageApi(object): - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - self.list_storage_project_datasets_endpoint = _Endpoint( - settings={ - "response_type": (DatasetsPage,), - "auth": [], - "endpoint_path": "/storages/projects/{projectId}/datasets", - "operation_id": "list_storage_project_datasets", - "http_method": "GET", - "servers": None, - }, - params_map={ - "all": [ - "project_id", - ], - "required": [ - "project_id", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "project_id": (str,), - }, - "attribute_map": { - "project_id": "projectId", - }, - "location_map": { - "project_id": "path", - }, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json", "application/problem+json"], - "content_type": [], - }, - api_client=api_client, - ) - - def list_storage_project_datasets(self, project_id, **kwargs): - """Gets all datasets in folder under a given storage project that the current user has access to. # noqa: E501 - - Gets all datasets in folder under a given storage project that the current user has access to. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_storage_project_datasets(project_id, async_req=True) - >>> result = thread.get() - - Args: - project_id (str): The Synapse ID of a storage project. - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _content_type (str/None): force body content-type. - Default is None and content-type will be predicted by allowed - content-types and body. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - _request_auths (list): set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. - Default is None - async_req (bool): execute request asynchronously - - Returns: - DatasetsPage - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False) - kwargs["_content_type"] = kwargs.get("_content_type") - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["_request_auths"] = kwargs.get("_request_auths", None) - kwargs["project_id"] = project_id - return self.list_storage_project_datasets_endpoint.call_with_http_info(**kwargs) diff --git a/libs/schematic/api-client-python/schematic_client/api_client.py b/libs/schematic/api-client-python/schematic_client/api_client.py deleted file mode 100644 index 44fbc2ea1..000000000 --- a/libs/schematic/api-client-python/schematic_client/api_client.py +++ /dev/null @@ -1,1011 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import json -import atexit -import mimetypes -from multiprocessing.pool import ThreadPool -import io -import os -import re -import typing -from urllib.parse import quote -from urllib3.fields import RequestField - - -from schematic_client import rest -from schematic_client.configuration import Configuration -from schematic_client.exceptions import ApiTypeError, ApiValueError, ApiException -from schematic_client.model_utils import ( - ModelNormal, - ModelSimple, - ModelComposed, - check_allowed_values, - check_validations, - date, - datetime, - deserialize_file, - file_type, - model_to_dict, - none_type, - validate_and_convert_types, -) - - -class ApiClient(object): - """Generic API client for OpenAPI client library builds. - - OpenAPI generic API client. This client handles the client- - server communication, and is invariant across implementations. Specifics of - the methods and models for each application are generated from the OpenAPI - templates. - - NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - Do not edit the class manually. - - :param configuration: .Configuration object for this client - :param header_name: a header to pass when making calls to the API. - :param header_value: a header value to pass when making calls to - the API. - :param cookie: a cookie to include in the header when making calls - to the API - :param pool_threads: The number of threads to use for async requests - to the API. More threads means more concurrent API requests. - """ - - _pool = None - - def __init__( - self, - configuration=None, - header_name=None, - header_value=None, - cookie=None, - pool_threads=1, - ): - if configuration is None: - configuration = Configuration.get_default_copy() - self.configuration = configuration - self.pool_threads = pool_threads - - self.rest_client = rest.RESTClientObject(configuration) - self.default_headers = {} - if header_name is not None: - self.default_headers[header_name] = header_value - self.cookie = cookie - # Set default User-Agent. 
- self.user_agent = "OpenAPI-Generator/1.0.0/python" - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - self.close() - - def close(self): - if self._pool: - self._pool.close() - self._pool.join() - self._pool = None - if hasattr(atexit, "unregister"): - atexit.unregister(self.close) - - @property - def pool(self): - """Create thread pool on first request - avoids instantiating unused threadpool for blocking clients. - """ - if self._pool is None: - atexit.register(self.close) - self._pool = ThreadPool(self.pool_threads) - return self._pool - - @property - def user_agent(self): - """User agent for this API client""" - return self.default_headers["User-Agent"] - - @user_agent.setter - def user_agent(self, value): - self.default_headers["User-Agent"] = value - - def set_default_header(self, header_name, header_value): - self.default_headers[header_name] = header_value - - def __call_api( - self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[ - typing.List[typing.Tuple[str, typing.Any]] - ] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: typing.Optional[typing.List[str]] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[ - typing.Union[int, float, typing.Tuple] - ] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None, - _content_type: typing.Optional[str] = None, - _request_auths: typing.Optional[ - typing.List[typing.Dict[str, typing.Any]] - ] = None, - ): - - config = self.configuration - - # header parameters - header_params = header_params or {} - header_params.update(self.default_headers) - if self.cookie: - header_params["Cookie"] = self.cookie - if header_params: - header_params = self.sanitize_for_serialization(header_params) - header_params = dict( - self.parameters_to_tuples(header_params, collection_formats) - ) - - # path parameters - if path_params: - path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, collection_formats) - for k, v in path_params: - # specified safe chars, encode everything - resource_path = resource_path.replace( - "{%s}" % k, quote(str(v), safe=config.safe_chars_for_path_param) - ) - - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, collection_formats) - - # post parameters - if post_params or files: - post_params = post_params if post_params else [] - post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, collection_formats) - post_params.extend(self.files_parameters(files)) - if header_params["Content-Type"].startswith("multipart"): - post_params = self.parameters_to_multipart(post_params, (dict)) - - # body - if body: - body = self.sanitize_for_serialization(body) - - # auth setting - self.update_params_for_auth( - header_params, - query_params, - auth_settings, - resource_path, - method, - body, - 
request_auths=_request_auths, - ) - - # request url - if _host is None: - url = self.configuration.host + resource_path - else: - # use server/host defined in path or operation instead - url = _host + resource_path - - try: - # perform request and return response - response_data = self.request( - method, - url, - query_params=query_params, - headers=header_params, - post_params=post_params, - body=body, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - ) - except ApiException as e: - e.body = e.body.decode("utf-8") - raise e - - self.last_response = response_data - - return_data = response_data - - if not _preload_content: - return return_data - return return_data - - # deserialize response data - if response_type: - if response_type != (file_type,): - encoding = "utf-8" - content_type = response_data.getheader("content-type") - if content_type is not None: - match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) - if match: - encoding = match.group(1) - response_data.data = response_data.data.decode(encoding) - - return_data = self.deserialize(response_data, response_type, _check_type) - else: - return_data = None - - if _return_http_data_only: - return return_data - else: - return (return_data, response_data.status, response_data.getheaders()) - - def parameters_to_multipart(self, params, collection_types): - """Get parameters as list of tuples, formatting as json if value is collection_types - - :param params: Parameters as list of two-tuples - :param dict collection_types: Parameter collection types - :return: Parameters as list of tuple or urllib3.fields.RequestField - """ - new_params = [] - if collection_types is None: - collection_types = dict - for k, v in ( - params.items() if isinstance(params, dict) else params - ): # noqa: E501 - if isinstance( - v, collection_types - ): # v is instance of collection_type, formatting as application/json - v = json.dumps(v, ensure_ascii=False).encode("utf-8") - field = RequestField(k, v) - field.make_multipart(content_type="application/json; charset=utf-8") - new_params.append(field) - else: - new_params.append((k, v)) - return new_params - - @classmethod - def sanitize_for_serialization(cls, obj): - """Prepares data for transmission before it is sent with the rest client - If obj is None, return None. - If obj is str, int, long, float, bool, return directly. - If obj is datetime.datetime, datetime.date - convert to string in iso8601 format. - If obj is list, sanitize each element in the list. - If obj is dict, return the dict. - If obj is OpenAPI model, return the properties dict. - If obj is io.IOBase, return the bytes - :param obj: The data to serialize. - :return: The serialized form of data. 
- """ - if isinstance(obj, (ModelNormal, ModelComposed)): - return { - key: cls.sanitize_for_serialization(val) - for key, val in model_to_dict(obj, serialize=True).items() - } - elif isinstance(obj, io.IOBase): - return cls.get_file_data_and_close_file(obj) - elif isinstance(obj, (str, int, float, none_type, bool)): - return obj - elif isinstance(obj, (datetime, date)): - return obj.isoformat() - elif isinstance(obj, ModelSimple): - return cls.sanitize_for_serialization(obj.value) - elif isinstance(obj, (list, tuple)): - return [cls.sanitize_for_serialization(item) for item in obj] - if isinstance(obj, dict): - return { - key: cls.sanitize_for_serialization(val) for key, val in obj.items() - } - raise ApiValueError( - "Unable to prepare type {} for serialization".format(obj.__class__.__name__) - ) - - def deserialize(self, response, response_type, _check_type): - """Deserializes response into an object. - - :param response: RESTResponse object to be deserialized. - :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param _check_type: boolean, whether to check the types of the data - received from the server - :type _check_type: bool - - :return: deserialized object. - """ - # handle file downloading - # save response body into a tmp file and return the instance - if response_type == (file_type,): - content_disposition = response.getheader("Content-Disposition") - return deserialize_file( - response.data, - self.configuration, - content_disposition=content_disposition, - ) - - # fetch data from response object - try: - received_data = json.loads(response.data) - except ValueError: - received_data = response.data - - # store our data under the key of 'received_data' so users have some - # context if they are deserializing a string and the data type is wrong - deserialized_data = validate_and_convert_types( - received_data, - response_type, - ["received_data"], - True, - _check_type, - configuration=self.configuration, - ) - return deserialized_data - - def call_api( - self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[ - typing.List[typing.Tuple[str, typing.Any]] - ] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: typing.Optional[typing.List[str]] = None, - async_req: typing.Optional[bool] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[ - typing.Union[int, float, typing.Tuple] - ] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None, - _request_auths: typing.Optional[ - typing.List[typing.Dict[str, typing.Any]] - ] = None, - ): - """Makes the HTTP request (synchronous) and returns deserialized data. - - To make an async_req request, set the async_req parameter. - - :param resource_path: Path to method endpoint. 
- :param method: Method to call. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param auth_settings list: Auth Settings names for the request. - :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param files: key -> field name, value -> a list of open file - objects for `multipart/form-data`. - :type files: dict - :param async_req bool: execute request asynchronously - :type async_req: bool, optional - :param _return_http_data_only: response data without head status code - and headers - :type _return_http_data_only: bool, optional - :param collection_formats: dict of collection formats for path, query, - header, and post parameters. - :type collection_formats: dict, optional - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :type _preload_content: bool, optional - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :param _check_type: boolean describing if the data back from the server - should have its type checked. - :type _check_type: bool, optional - :param _request_auths: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. - :type _request_auths: list, optional - :return: - If async_req parameter is True, - the request will be called asynchronously. - The method will return the request thread. - If parameter async_req is False or missing, - then the method will return the response directly. 
- """ - if not async_req: - return self.__call_api( - resource_path, - method, - path_params, - query_params, - header_params, - body, - post_params, - files, - response_type, - auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, - _request_timeout, - _host, - _check_type, - _request_auths=_request_auths, - ) - - return self.pool.apply_async( - self.__call_api, - ( - resource_path, - method, - path_params, - query_params, - header_params, - body, - post_params, - files, - response_type, - auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, - _request_timeout, - _host, - _check_type, - None, - _request_auths, - ), - ) - - def request( - self, - method, - url, - query_params=None, - headers=None, - post_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - """Makes the HTTP request using RESTClient.""" - if method == "GET": - return self.rest_client.GET( - url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers, - ) - elif method == "HEAD": - return self.rest_client.HEAD( - url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers, - ) - elif method == "OPTIONS": - return self.rest_client.OPTIONS( - url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - elif method == "POST": - return self.rest_client.POST( - url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - elif method == "PUT": - return self.rest_client.PUT( - url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - elif method == "PATCH": - return self.rest_client.PATCH( - url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - elif method == "DELETE": - return self.rest_client.DELETE( - url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - else: - raise ApiValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." - ) - - def parameters_to_tuples(self, params, collection_formats): - """Get parameters as list of tuples, formatting collections. 
- - :param params: Parameters as dict or list of two-tuples - :param dict collection_formats: Parameter collection formats - :return: Parameters as list of tuples, collections formatted - """ - new_params = [] - if collection_formats is None: - collection_formats = {} - for k, v in ( - params.items() if isinstance(params, dict) else params - ): # noqa: E501 - if k in collection_formats: - collection_format = collection_formats[k] - if collection_format == "multi": - new_params.extend((k, value) for value in v) - else: - if collection_format == "ssv": - delimiter = " " - elif collection_format == "tsv": - delimiter = "\t" - elif collection_format == "pipes": - delimiter = "|" - else: # csv is the default - delimiter = "," - new_params.append((k, delimiter.join(str(value) for value in v))) - else: - new_params.append((k, v)) - return new_params - - @staticmethod - def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: - file_data = file_instance.read() - file_instance.close() - return file_data - - def files_parameters( - self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None - ): - """Builds form parameters. - - :param files: None or a dict with key=param_name and - value is a list of open file objects - :return: List of tuples of form parameters with file data - """ - if files is None: - return [] - - params = [] - for param_name, file_instances in files.items(): - if file_instances is None: - # if the file field is nullable, skip None values - continue - for file_instance in file_instances: - if file_instance is None: - # if the file field is nullable, skip None values - continue - if file_instance.closed is True: - raise ApiValueError( - "Cannot read a closed file. The passed in file_type " - "for %s must be open." % param_name - ) - filename = os.path.basename(file_instance.name) - filedata = self.get_file_data_and_close_file(file_instance) - mimetype = ( - mimetypes.guess_type(filename)[0] or "application/octet-stream" - ) - params.append( - tuple([param_name, tuple([filename, filedata, mimetype])]) - ) - - return params - - def select_header_accept(self, accepts): - """Returns `Accept` based on an array of accepts provided. - - :param accepts: List of headers. - :return: Accept (e.g. application/json). - """ - if not accepts: - return - - accepts = [x.lower() for x in accepts] - - if "application/json" in accepts: - return "application/json" - else: - return ", ".join(accepts) - - def select_header_content_type(self, content_types, method=None, body=None): - """Returns `Content-Type` based on an array of content_types provided. - - :param content_types: List of content-types. - :param method: http method (e.g. POST, PATCH). - :param body: http body to send. - :return: Content-Type (e.g. application/json). - """ - if not content_types: - return None - - content_types = [x.lower() for x in content_types] - - if ( - method == "PATCH" - and "application/json-patch+json" in content_types - and isinstance(body, list) - ): - return "application/json-patch+json" - - if "application/json" in content_types or "*/*" in content_types: - return "application/json" - else: - return content_types[0] - - def update_params_for_auth( - self, - headers, - queries, - auth_settings, - resource_path, - method, - body, - request_auths=None, - ): - """Updates header and query params based on authentication setting. - - :param headers: Header parameters dict to be updated. - :param queries: Query parameters tuple list to be updated. 
- :param auth_settings: Authentication setting identifiers list. - :param resource_path: A string representation of the HTTP request resource path. - :param method: A string representation of the HTTP request method. - :param body: A object representing the body of the HTTP request. - The object type is the return value of _encoder.default(). - :param request_auths: if set, the provided settings will - override the token in the configuration. - """ - if not auth_settings: - return - - if request_auths: - for auth_setting in request_auths: - self._apply_auth_params( - headers, queries, resource_path, method, body, auth_setting - ) - return - - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - self._apply_auth_params( - headers, queries, resource_path, method, body, auth_setting - ) - - def _apply_auth_params( - self, headers, queries, resource_path, method, body, auth_setting - ): - if auth_setting["in"] == "cookie": - headers["Cookie"] = auth_setting["key"] + "=" + auth_setting["value"] - elif auth_setting["in"] == "header": - if auth_setting["type"] != "http-signature": - headers[auth_setting["key"]] = auth_setting["value"] - elif auth_setting["in"] == "query": - queries.append((auth_setting["key"], auth_setting["value"])) - else: - raise ApiValueError("Authentication token must be in `query` or `header`") - - -class Endpoint(object): - def __init__( - self, - settings=None, - params_map=None, - root_map=None, - headers_map=None, - api_client=None, - callable=None, - ): - """Creates an endpoint - - Args: - settings (dict): see below key value pairs - 'response_type' (tuple/None): response type - 'auth' (list): a list of auth type keys - 'endpoint_path' (str): the endpoint path - 'operation_id' (str): endpoint string identifier - 'http_method' (str): POST/PUT/PATCH/GET etc - 'servers' (list): list of str servers that this endpoint is at - params_map (dict): see below key value pairs - 'all' (list): list of str endpoint parameter names - 'required' (list): list of required parameter names - 'nullable' (list): list of nullable parameter names - 'enum' (list): list of parameters with enum values - 'validation' (list): list of parameters with validations - root_map - 'validations' (dict): the dict mapping endpoint parameter tuple - paths to their validation dictionaries - 'allowed_values' (dict): the dict mapping endpoint parameter - tuple paths to their allowed_values (enum) dictionaries - 'openapi_types' (dict): param_name to openapi type - 'attribute_map' (dict): param_name to camelCase name - 'location_map' (dict): param_name to 'body', 'file', 'form', - 'header', 'path', 'query' - collection_format_map (dict): param_name to `csv` etc. 
- headers_map (dict): see below key value pairs - 'accept' (list): list of Accept header strings - 'content_type' (list): list of Content-Type header strings - api_client (ApiClient) api client instance - callable (function): the function which is invoked when the - Endpoint is called - """ - self.settings = settings - self.params_map = params_map - self.params_map["all"].extend( - [ - "async_req", - "_host_index", - "_preload_content", - "_request_timeout", - "_return_http_data_only", - "_check_input_type", - "_check_return_type", - "_content_type", - "_spec_property_naming", - "_request_auths", - ] - ) - self.params_map["nullable"].extend(["_request_timeout"]) - self.validations = root_map["validations"] - self.allowed_values = root_map["allowed_values"] - self.openapi_types = root_map["openapi_types"] - extra_types = { - "async_req": (bool,), - "_host_index": (none_type, int), - "_preload_content": (bool,), - "_request_timeout": ( - none_type, - float, - (float,), - [float], - int, - (int,), - [int], - ), - "_return_http_data_only": (bool,), - "_check_input_type": (bool,), - "_check_return_type": (bool,), - "_spec_property_naming": (bool,), - "_content_type": (none_type, str), - "_request_auths": (none_type, list), - } - self.openapi_types.update(extra_types) - self.attribute_map = root_map["attribute_map"] - self.location_map = root_map["location_map"] - self.collection_format_map = root_map["collection_format_map"] - self.headers_map = headers_map - self.api_client = api_client - self.callable = callable - - def __validate_inputs(self, kwargs): - for param in self.params_map["enum"]: - if param in kwargs: - check_allowed_values(self.allowed_values, (param,), kwargs[param]) - - for param in self.params_map["validation"]: - if param in kwargs: - check_validations( - self.validations, - (param,), - kwargs[param], - configuration=self.api_client.configuration, - ) - - if kwargs["_check_input_type"] is False: - return - - for key, value in kwargs.items(): - fixed_val = validate_and_convert_types( - value, - self.openapi_types[key], - [key], - kwargs["_spec_property_naming"], - kwargs["_check_input_type"], - configuration=self.api_client.configuration, - ) - kwargs[key] = fixed_val - - def __gather_params(self, kwargs): - params = { - "body": None, - "collection_format": {}, - "file": {}, - "form": [], - "header": {}, - "path": {}, - "query": [], - } - - for param_name, param_value in kwargs.items(): - param_location = self.location_map.get(param_name) - if param_location is None: - continue - if param_location: - if param_location == "body": - params["body"] = param_value - continue - base_name = self.attribute_map[param_name] - if param_location == "form" and self.openapi_types[param_name] == ( - file_type, - ): - params["file"][base_name] = [param_value] - elif param_location == "form" and self.openapi_types[param_name] == ( - [file_type], - ): - # param_value is already a list - params["file"][base_name] = param_value - elif param_location in {"form", "query"}: - param_value_full = (base_name, param_value) - params[param_location].append(param_value_full) - if param_location not in {"form", "query"}: - params[param_location][base_name] = param_value - collection_format = self.collection_format_map.get(param_name) - if collection_format: - params["collection_format"][base_name] = collection_format - - return params - - def __call__(self, *args, **kwargs): - """This method is invoked when endpoints are called - Example: - - api_instance = StorageApi() - 
api_instance.list_storage_project_datasets # this is an instance of the class Endpoint - api_instance.list_storage_project_datasets() # this invokes api_instance.list_storage_project_datasets.__call__() - which then invokes the callable functions stored in that endpoint at - api_instance.list_storage_project_datasets.callable or self.callable in this class - - """ - return self.callable(self, *args, **kwargs) - - def call_with_http_info(self, **kwargs): - - try: - index = ( - self.api_client.configuration.server_operation_index.get( - self.settings["operation_id"], - self.api_client.configuration.server_index, - ) - if kwargs["_host_index"] is None - else kwargs["_host_index"] - ) - server_variables = ( - self.api_client.configuration.server_operation_variables.get( - self.settings["operation_id"], - self.api_client.configuration.server_variables, - ) - ) - _host = self.api_client.configuration.get_host_from_settings( - index, variables=server_variables, servers=self.settings["servers"] - ) - except IndexError: - if self.settings["servers"]: - raise ApiValueError( - "Invalid host index. Must be 0 <= index < %s" - % len(self.settings["servers"]) - ) - _host = None - - for key, value in kwargs.items(): - if key not in self.params_map["all"]: - raise ApiTypeError( - "Got an unexpected parameter '%s'" - " to method `%s`" % (key, self.settings["operation_id"]) - ) - # only throw this nullable ApiValueError if _check_input_type - # is False, if _check_input_type==True we catch this case - # in self.__validate_inputs - if ( - key not in self.params_map["nullable"] - and value is None - and kwargs["_check_input_type"] is False - ): - raise ApiValueError( - "Value may not be None for non-nullable parameter `%s`" - " when calling `%s`" % (key, self.settings["operation_id"]) - ) - - for key in self.params_map["required"]: - if key not in kwargs.keys(): - raise ApiValueError( - "Missing the required parameter `%s` when calling " - "`%s`" % (key, self.settings["operation_id"]) - ) - - self.__validate_inputs(kwargs) - - params = self.__gather_params(kwargs) - - accept_headers_list = self.headers_map["accept"] - if accept_headers_list: - params["header"]["Accept"] = self.api_client.select_header_accept( - accept_headers_list - ) - - if kwargs.get("_content_type"): - params["header"]["Content-Type"] = kwargs["_content_type"] - else: - content_type_headers_list = self.headers_map["content_type"] - if content_type_headers_list: - if params["body"] != "": - content_types_list = self.api_client.select_header_content_type( - content_type_headers_list, - self.settings["http_method"], - params["body"], - ) - if content_types_list: - params["header"]["Content-Type"] = content_types_list - - return self.api_client.call_api( - self.settings["endpoint_path"], - self.settings["http_method"], - params["path"], - params["query"], - params["header"], - body=params["body"], - post_params=params["form"], - files=params["file"], - response_type=self.settings["response_type"], - auth_settings=self.settings["auth"], - async_req=kwargs["async_req"], - _check_type=kwargs["_check_return_type"], - _return_http_data_only=kwargs["_return_http_data_only"], - _preload_content=kwargs["_preload_content"], - _request_timeout=kwargs["_request_timeout"], - _host=_host, - _request_auths=kwargs["_request_auths"], - collection_formats=params["collection_format"], - ) diff --git a/libs/schematic/api-client-python/schematic_client/apis/__init__.py b/libs/schematic/api-client-python/schematic_client/apis/__init__.py deleted file mode 100644 
index 91fca3862..000000000 --- a/libs/schematic/api-client-python/schematic_client/apis/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# flake8: noqa - -# Import all APIs into this package. -# If you have many APIs here with many many models used in each API this may -# raise a `RecursionError`. -# In order to avoid this, import only the API that you directly need like: -# -# from schematic_client.api.storage_api import StorageApi -# -# or import this package, but before doing it, use: -# -# import sys -# sys.setrecursionlimit(n) - -# Import APIs into API package: -from schematic_client.api.storage_api import StorageApi diff --git a/libs/schematic/api-client-python/schematic_client/configuration.py b/libs/schematic/api-client-python/schematic_client/configuration.py deleted file mode 100644 index 61ade8758..000000000 --- a/libs/schematic/api-client-python/schematic_client/configuration.py +++ /dev/null @@ -1,462 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import copy -import logging -import multiprocessing -import sys -import urllib3 - -from http import client as http_client -from schematic_client.exceptions import ApiValueError - - -JSON_SCHEMA_VALIDATION_KEYWORDS = { - "multipleOf", - "maximum", - "exclusiveMaximum", - "minimum", - "exclusiveMinimum", - "maxLength", - "minLength", - "pattern", - "maxItems", - "minItems", -} - - -class Configuration(object): - """NOTE: This class is auto generated by OpenAPI Generator - - Ref: https://openapi-generator.tech - Do not edit the class manually. - - :param host: Base url - :param api_key: Dict to store API key(s). - Each entry in the dict specifies an API key. - The dict key is the name of the security scheme in the OAS specification. - The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer) - The dict key is the name of the security scheme in the OAS specification. - The dict value is an API key prefix when generating the auth data. - :param username: Username for HTTP basic authentication - :param password: Password for HTTP basic authentication - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. - If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. 
If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. - :param server_index: Index to servers configuration. - :param server_variables: Mapping with string values to replace variables in - templated server configuration. The validation of enums is performed for - variables with defined enum values before. - :param server_operation_index: Mapping from operation ID to an index to server - configuration. - :param server_operation_variables: Mapping from operation ID to a mapping with - string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. - :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format - - """ - - _default = None - - def __init__( - self, - host=None, - api_key=None, - api_key_prefix=None, - access_token=None, - username=None, - password=None, - discard_unknown_keys=False, - disabled_client_side_validations="", - server_index=None, - server_variables=None, - server_operation_index=None, - server_operation_variables=None, - ssl_ca_cert=None, - ): - """Constructor""" - self._base_path = "http://localhost/api/v1" if host is None else host - """Default Base url - """ - self.server_index = 0 if server_index is None and host is None else server_index - self.server_operation_index = server_operation_index or {} - """Default server index - """ - self.server_variables = server_variables or {} - self.server_operation_variables = server_operation_variables or {} - """Default server variables - """ - self.temp_folder_path = None - """Temp file folder for downloading files - """ - # Authentication Settings - self.access_token = access_token - self.api_key = {} - if api_key: - self.api_key = api_key - """dict to store API key(s) - """ - self.api_key_prefix = {} - if api_key_prefix: - self.api_key_prefix = api_key_prefix - """dict to store API prefix (e.g. Bearer) - """ - self.refresh_api_key_hook = None - """function hook to refresh API key if expired - """ - self.username = username - """Username for HTTP basic authentication - """ - self.password = password - """Password for HTTP basic authentication - """ - self.discard_unknown_keys = discard_unknown_keys - self.disabled_client_side_validations = disabled_client_side_validations - self.logger = {} - """Logging Settings - """ - self.logger["package_logger"] = logging.getLogger("schematic_client") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - self.logger_format = "%(asctime)s %(levelname)s %(message)s" - """Log format - """ - self.logger_stream_handler = None - """Log stream handler - """ - self.logger_file_handler = None - """Log file handler - """ - self.logger_file = None - """Debug file location - """ - self.debug = False - """Debug switch - """ - - self.verify_ssl = True - """SSL/TLS verification - Set this to false to skip verifying SSL certificate when calling API - from https server. - """ - self.ssl_ca_cert = ssl_ca_cert - """Set this to customize the certificate file to verify the peer. 
- """ - self.cert_file = None - """client certificate file - """ - self.key_file = None - """client key file - """ - self.assert_hostname = None - """Set this to True/False to enable/disable SSL hostname verification. - """ - - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - """urllib3 connection pool's maximum number of connections saved - per pool. urllib3 uses 1 connection as default value, but this is - not the best value when you are making a lot of possibly parallel - requests to the same host, which is often the case here. - cpu_count * 5 is used as default value to increase performance. - """ - - self.proxy = None - """Proxy URL - """ - self.no_proxy = None - """bypass proxy for host in the no_proxy list. - """ - self.proxy_headers = None - """Proxy headers - """ - self.safe_chars_for_path_param = "" - """Safe chars for path_param - """ - self.retries = None - """Adding retries to override urllib3 default value 3 - """ - # Enable client side validation - self.client_side_validation = True - - # Options to pass down to the underlying urllib3 socket - self.socket_options = None - - def __deepcopy__(self, memo): - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - if k not in ("logger", "logger_file_handler"): - setattr(result, k, copy.deepcopy(v, memo)) - # shallow copy of loggers - result.logger = copy.copy(self.logger) - # use setters to configure loggers - result.logger_file = self.logger_file - result.debug = self.debug - return result - - def __setattr__(self, name, value): - object.__setattr__(self, name, value) - if name == "disabled_client_side_validations": - s = set(filter(None, value.split(","))) - for v in s: - if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise ApiValueError("Invalid keyword: '{0}''".format(v)) - self._disabled_client_side_validations = s - - @classmethod - def set_default(cls, default): - """Set default instance of configuration. - - It stores default configuration, which can be - returned by get_default_copy method. - - :param default: object of Configuration - """ - cls._default = copy.deepcopy(default) - - @classmethod - def get_default_copy(cls): - """Return new instance of configuration. - - This method returns newly created, based on default constructor, - object of Configuration class or returns a copy of default - configuration passed by the set_default method. - - :return: The configuration object. - """ - if cls._default is not None: - return copy.deepcopy(cls._default) - return Configuration() - - @property - def logger_file(self): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. 
- self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in self.logger.items(): - logger.addHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in self.logger.items(): - logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in self.logger.items(): - logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier, alias=None): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :param alias: The alternative identifier of apiKey. - :return: The token for api key authentication. - """ - if self.refresh_api_key_hook is not None: - self.refresh_api_key_hook(self) - key = self.api_key.get( - identifier, self.api_key.get(alias) if alias is not None else None - ) - if key: - prefix = self.api_key_prefix.get(identifier) - if prefix: - return "%s %s" % (prefix, key) - else: - return key - - def get_basic_auth_token(self): - """Gets HTTP basic authentication header (string). - - :return: The token for basic HTTP authentication. - """ - username = "" - if self.username is not None: - username = self.username - password = "" - if self.password is not None: - password = self.password - return urllib3.util.make_headers(basic_auth=username + ":" + password).get( - "authorization" - ) - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. - """ - auth = {} - return auth - - def to_debug_report(self): - """Gets the essential information for debugging. - - :return: The report for debugging. 
- """ - return ( - "Python SDK Debug Report:\n" - "OS: {env}\n" - "Python Version: {pyversion}\n" - "Version of the API: 0.1.0\n" - "SDK Package Version: 1.0.0".format(env=sys.platform, pyversion=sys.version) - ) - - def get_host_settings(self): - """Gets an array of host settings - - :return: An array of host settings - """ - return [ - { - "url": "http://localhost/api/v1", - "description": "No description provided", - } - ] - - def get_host_from_settings(self, index, variables=None, servers=None): - """Gets host URL based on the index and variables - :param index: array index of the host settings - :param variables: hash of variable and the corresponding value - :param servers: an array of host settings or None - :return: URL based on host settings - """ - if index is None: - return self._base_path - - variables = {} if variables is None else variables - servers = self.get_host_settings() if servers is None else servers - - try: - server = servers[index] - except IndexError: - raise ValueError( - "Invalid index {0} when selecting the host settings. " - "Must be less than {1}".format(index, len(servers)) - ) - - url = server["url"] - - # go through variables and replace placeholders - for variable_name, variable in server.get("variables", {}).items(): - used_value = variables.get(variable_name, variable["default_value"]) - - if "enum_values" in variable and used_value not in variable["enum_values"]: - raise ValueError( - "The variable `{0}` in the host URL has invalid value " - "{1}. Must be {2}.".format( - variable_name, variables[variable_name], variable["enum_values"] - ) - ) - - url = url.replace("{" + variable_name + "}", used_value) - - return url - - @property - def host(self): - """Return generated host.""" - return self.get_host_from_settings( - self.server_index, variables=self.server_variables - ) - - @host.setter - def host(self, value): - """Fix base path.""" - self._base_path = value - self.server_index = None diff --git a/libs/schematic/api-client-python/schematic_client/exceptions.py b/libs/schematic/api-client-python/schematic_client/exceptions.py deleted file mode 100644 index 325134dd3..000000000 --- a/libs/schematic/api-client-python/schematic_client/exceptions.py +++ /dev/null @@ -1,157 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - - -class OpenApiException(Exception): - """The base exception class for all OpenAPIExceptions""" - - -class ApiTypeError(OpenApiException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None): - """Raises an exception for TypeErrors - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list): a list of keys an indices to get to the - current_item - None if unset - valid_classes (tuple): the primitive classes that current item - should be an instance of - None if unset - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - None if unset - """ - self.path_to_item = path_to_item - self.valid_classes = valid_classes - self.key_type = key_type - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiTypeError, self).__init__(full_msg) - - -class ApiValueError(OpenApiException, ValueError): - def __init__(self, msg, path_to_item=None): - """ - 
Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list) the path to the exception in the - received_data dict. None if unset - """ - - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiValueError, self).__init__(full_msg) - - -class ApiAttributeError(OpenApiException, AttributeError): - def __init__(self, msg, path_to_item=None): - """ - Raised when an attribute reference or assignment fails. - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiAttributeError, self).__init__(full_msg) - - -class ApiKeyError(OpenApiException, KeyError): - def __init__(self, msg, path_to_item=None): - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiKeyError, self).__init__(full_msg) - - -class ApiException(OpenApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """Custom error messages for exception""" - error_message = "Status Code: {0}\n" "Reason: {1}\n".format( - self.status, self.reason - ) - if self.headers: - error_message += "HTTP response headers: {0}\n".format(self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message - - -class NotFoundException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(NotFoundException, self).__init__(status, reason, http_resp) - - -class UnauthorizedException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(UnauthorizedException, self).__init__(status, reason, http_resp) - - -class ForbiddenException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(ForbiddenException, self).__init__(status, reason, http_resp) - - -class ServiceException(ApiException): - - def __init__(self, status=None, reason=None, http_resp=None): - super(ServiceException, self).__init__(status, reason, http_resp) - - -def render_path(path_to_item): - """Returns a string representation of a path""" - result = "" - for pth in path_to_item: - if isinstance(pth, int): - result += "[{0}]".format(pth) - else: - result += "['{0}']".format(pth) - return result diff --git a/libs/schematic/api-client-python/schematic_client/model/__init__.py b/libs/schematic/api-client-python/schematic_client/model/__init__.py deleted file mode 100644 index 494c0d3a3..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# we can not import model classes here because that would create a circular -# reference which would not work in python2 -# do not import all models into this module because that uses a lot of memory and stack frames -# if you need the ability to import all models from one 
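# A minimal sketch of the exception hierarchy defined above: ApiException carries
# status/reason/body/headers, NotFoundException is its 404 specialisation, and
# render_path formats a path_to_item locator. Assumes the schematic_client
# package being removed here is still importable when the sketch runs.
from schematic_client.exceptions import ApiException, NotFoundException, render_path

err = NotFoundException(status=404, reason="Not Found")
print(isinstance(err, ApiException))  # True: subclasses share ApiException's __str__
print(err)                            # "Status Code: 404\nReason: Not Found\n"

# render_path turns a path_to_item list into a readable locator string.
print(render_path(["datasets", 0, "name"]))  # ['datasets'][0]['name']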
package, import them with -# from schematic_client.models import ModelA, ModelB diff --git a/libs/schematic/api-client-python/schematic_client/model/basic_error.py b/libs/schematic/api-client-python/schematic_client/model/basic_error.py deleted file mode 100644 index 7969affc3..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/basic_error.py +++ /dev/null @@ -1,297 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel, -) -from schematic_client.exceptions import ApiAttributeError - - -class BasicError(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - "title": (str,), # noqa: E501 - "status": (int,), # noqa: E501 - "detail": (str,), # noqa: E501 - "type": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "title": "title", # noqa: E501 - "status": "status", # noqa: E501 - "detail": "detail", # noqa: E501 - "type": "type", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, title, status, *args, **kwargs): # noqa: E501 - """BasicError - a model defined in OpenAPI - - Args: - title (str): A human readable documentation for the problem type - status (int): The HTTP status code - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - detail (str): A human readable explanation specific to this occurrence of the problem. [optional] # noqa: E501 - type (str): An absolute URI that identifies the problem type. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", True) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.title = title - self.status = status - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, title, status, *args, **kwargs): # noqa: E501 - """BasicError - a model defined in OpenAPI - - Args: - title (str): A human readable documentation for the problem type - status (int): The HTTP status code - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - detail (str): A human readable explanation specific to this occurrence of the problem. [optional] # noqa: E501 - type (str): An absolute URI that identifies the problem type. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.title = title - self.status = status - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
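# Sketch of constructing the BasicError model defined above: title and status are
# required arguments, detail and type are optional keyword arguments routed
# through the kwargs loop into set_attribute. Assumes the removed
# schematic_client package is importable.
from schematic_client.model.basic_error import BasicError

problem = BasicError(
    title="Dataset not found",
    status=404,
    detail="No dataset exists for the requested identifier",
)
print(problem.title, problem.status)  # values are held in the model's _data_store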
- ) diff --git a/libs/schematic/api-client-python/schematic_client/model/dataset.py b/libs/schematic/api-client-python/schematic_client/model/dataset.py deleted file mode 100644 index 590e3bd39..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/dataset.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel, -) -from schematic_client.exceptions import ApiAttributeError - - -class Dataset(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "name": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "name": "name", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 - """Dataset - a model defined in OpenAPI - - Args: - name (str): The name of the dataset. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", True) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, name, *args, **kwargs): # noqa: E501 - """Dataset - a model defined in OpenAPI - - Args: - name (str): The name of the dataset. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
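# Sketch of the two construction paths generated for every model above: __init__
# for building instances client-side, and _from_openapi_data for deserialising
# API responses (it defaults _spec_property_naming to True so wire-format key
# names are accepted). Assumes the removed schematic_client package is importable.
from schematic_client.model.dataset import Dataset

built = Dataset(name="example-dataset")                      # client-side construction
parsed = Dataset._from_openapi_data(name="example-dataset")  # deserialisation path
print(built.name == parsed.name)                             # True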
- ) diff --git a/libs/schematic/api-client-python/schematic_client/model/datasets_page.py b/libs/schematic/api-client-python/schematic_client/model/datasets_page.py deleted file mode 100644 index a2a5bbcf5..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/datasets_page.py +++ /dev/null @@ -1,360 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel, -) -from schematic_client.exceptions import ApiAttributeError - - -def lazy_import(): - from schematic_client.model.dataset import Dataset - from schematic_client.model.datasets_page_all_of import DatasetsPageAllOf - from schematic_client.model.page_metadata import PageMetadata - - globals()["Dataset"] = Dataset - globals()["DatasetsPageAllOf"] = DatasetsPageAllOf - globals()["PageMetadata"] = PageMetadata - - -class DatasetsPage(ModelComposed): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - "number": (int,), # noqa: E501 - "size": (int,), # noqa: E501 - "total_elements": (int,), # noqa: E501 - "total_pages": (int,), # noqa: E501 - "has_next": (bool,), # noqa: E501 - "has_previous": (bool,), # noqa: E501 - "datasets": ([Dataset],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "number": "number", # noqa: E501 - "size": "size", # noqa: E501 - "total_elements": "totalElements", # noqa: E501 - "total_pages": "totalPages", # noqa: E501 - "has_next": "hasNext", # noqa: E501 - "has_previous": "hasPrevious", # noqa: E501 - "datasets": "datasets", # noqa: E501 - } - - read_only_vars = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DatasetsPage - a model defined in OpenAPI - - Keyword Args: - number (int): The page number. - size (int): The number of items in a single page. - total_elements (int): Total number of elements in the result set. - total_pages (int): Total number of pages in the result set. - has_next (bool): Returns if there is a next page. - has_previous (bool): Returns if there is a previous page. - datasets ([Dataset]): A list of datasets. - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - constant_args = { - "_check_type": _check_type, - "_path_to_item": _path_to_item, - "_spec_property_naming": _spec_property_naming, - "_configuration": _configuration, - "_visited_composed_classes": self._visited_composed_classes, - } - composed_info = validate_get_composed_info(constant_args, kwargs, self) - self._composed_instances = composed_info[0] - self._var_name_to_model_instances = composed_info[1] - self._additional_properties_model_instances = composed_info[2] - discarded_args = composed_info[3] - - for var_name, var_value in kwargs.items(): - if ( - var_name in discarded_args - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self._additional_properties_model_instances - ): - # discard variable. - continue - setattr(self, var_name, var_value) - - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - "_composed_instances", - "_var_name_to_model_instances", - "_additional_properties_model_instances", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DatasetsPage - a model defined in OpenAPI - - Keyword Args: - number (int): The page number. - size (int): The number of items in a single page. - total_elements (int): Total number of elements in the result set. - total_pages (int): Total number of pages in the result set. - has_next (bool): Returns if there is a next page. - has_previous (bool): Returns if there is a previous page. - datasets ([Dataset]): A list of datasets. - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - constant_args = { - "_check_type": _check_type, - "_path_to_item": _path_to_item, - "_spec_property_naming": _spec_property_naming, - "_configuration": _configuration, - "_visited_composed_classes": self._visited_composed_classes, - } - composed_info = validate_get_composed_info(constant_args, kwargs, self) - self._composed_instances = composed_info[0] - self._var_name_to_model_instances = composed_info[1] - self._additional_properties_model_instances = composed_info[2] - discarded_args = composed_info[3] - - for var_name, var_value in kwargs.items(): - if ( - var_name in discarded_args - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self._additional_properties_model_instances - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) - - @cached_property - def _composed_schemas(): - # we need this here to make our import statements work - # we must store _composed_schemas in here so the code is only run - # when we invoke this method. 
If we kept this at the class - # level we would get an error because the class level - # code would be run when this module is imported, and these composed - # classes don't exist yet because their module has not finished - # loading - lazy_import() - return { - "anyOf": [], - "allOf": [ - DatasetsPageAllOf, - PageMetadata, - ], - "oneOf": [], - } diff --git a/libs/schematic/api-client-python/schematic_client/model/datasets_page_all_of.py b/libs/schematic/api-client-python/schematic_client/model/datasets_page_all_of.py deleted file mode 100644 index a377436f7..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/datasets_page_all_of.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel, -) -from schematic_client.exceptions import ApiAttributeError - - -def lazy_import(): - from schematic_client.model.dataset import Dataset - - globals()["Dataset"] = Dataset - - -class DatasetsPageAllOf(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
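# Sketch of the composed DatasetsPage model above (allOf: DatasetsPageAllOf plus
# PageMetadata): the page-level metadata and the datasets list are passed to one
# constructor and distributed across the composed instances by
# validate_get_composed_info. Assumes the removed schematic_client package is
# importable.
from schematic_client.model.dataset import Dataset
from schematic_client.model.datasets_page import DatasetsPage

page = DatasetsPage(
    number=0,
    size=10,
    total_elements=1,
    total_pages=1,
    has_next=False,
    has_previous=False,
    datasets=[Dataset(name="example-dataset")],
)
print(page.total_pages, [d.name for d in page.datasets])  # 1 ['example-dataset']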
- """ - lazy_import() - return { - "datasets": ([Dataset],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "datasets": "datasets", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, datasets, *args, **kwargs): # noqa: E501 - """DatasetsPageAllOf - a model defined in OpenAPI - - Args: - datasets ([Dataset]): A list of datasets. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", True) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.datasets = datasets - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, datasets, *args, **kwargs): # noqa: E501 - """DatasetsPageAllOf - a model defined in OpenAPI - - Args: - datasets ([Dataset]): A list of datasets. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.datasets = datasets - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/libs/schematic/api-client-python/schematic_client/model/page_metadata.py b/libs/schematic/api-client-python/schematic_client/model/page_metadata.py deleted file mode 100644 index 378315cad..000000000 --- a/libs/schematic/api-client-python/schematic_client/model/page_metadata.py +++ /dev/null @@ -1,333 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import re # noqa: F401 -import sys # noqa: F401 - -from schematic_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel, -) -from schematic_client.exceptions import ApiAttributeError - - -class PageMetadata(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - "number": (int,), # noqa: E501 - "size": (int,), # noqa: E501 - "total_elements": (int,), # noqa: E501 - "total_pages": (int,), # noqa: E501 - "has_next": (bool,), # noqa: E501 - "has_previous": (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "number": "number", # noqa: E501 - "size": "size", # noqa: E501 - "total_elements": "totalElements", # noqa: E501 - "total_pages": "totalPages", # noqa: E501 - "has_next": "hasNext", # noqa: E501 - "has_previous": "hasPrevious", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data( - cls, - number, - size, - total_elements, - total_pages, - has_next, - has_previous, - *args, - **kwargs, - ): # noqa: E501 - """PageMetadata - a model defined in OpenAPI - - Args: - number (int): The page number. - size (int): The number of items in a single page. - total_elements (int): Total number of elements in the result set. - total_pages (int): Total number of pages in the result set. - has_next (bool): Returns if there is a next page. - has_previous (bool): Returns if there is a previous page. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", True) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.number = number - self.size = size - self.total_elements = total_elements - self.total_pages = total_pages - self.has_next = has_next - self.has_previous = has_previous - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__( - self, - number, - size, - total_elements, - total_pages, - has_next, - has_previous, - *args, - **kwargs, - ): # noqa: E501 - """PageMetadata - a model defined in OpenAPI - - Args: - number (int): The page number. - size (int): The number of items in a single page. - total_elements (int): Total number of elements in the result set. - total_pages (int): Total number of pages in the result set. - has_next (bool): Returns if there is a next page. - has_previous (bool): Returns if there is a previous page. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - for arg in args: - if isinstance(arg, dict): - kwargs.update(arg) - else: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.number = number - self.size = size - self.total_elements = total_elements - self.total_pages = total_pages - self.has_next = has_next - self.has_previous = has_previous - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/libs/schematic/api-client-python/schematic_client/model_utils.py b/libs/schematic/api-client-python/schematic_client/model_utils.py deleted file mode 100644 index 4ed3db06e..000000000 --- a/libs/schematic/api-client-python/schematic_client/model_utils.py +++ /dev/null @@ -1,2090 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -from datetime import date, datetime # noqa: F401 -from copy import deepcopy -import inspect -import io -import os -import pprint -import re -import tempfile -import uuid - -from dateutil.parser import parse - -from schematic_client.exceptions import ( - ApiKeyError, - ApiAttributeError, - ApiTypeError, - ApiValueError, -) - -none_type = type(None) -file_type = io.IOBase - - -def convert_js_args_to_python_args(fn): - from functools import wraps - - @wraps(fn) - def wrapped_init(_self, *args, **kwargs): - """ - An attribute named `self` received from the api will conflicts with the reserved `self` - parameter of a class method. During generation, `self` attributes are mapped - to `_self` in models. Here, we name `_self` instead of `self` to avoid conflicts. 
- """ - spec_property_naming = kwargs.get("_spec_property_naming", False) - if spec_property_naming: - kwargs = change_keys_js_to_python( - kwargs, _self if isinstance(_self, type) else _self.__class__ - ) - return fn(_self, *args, **kwargs) - - return wrapped_init - - -class cached_property(object): - # this caches the result of the function call for fn with no inputs - # use this as a decorator on function methods that you want converted - # into cached properties - result_key = "_results" - - def __init__(self, fn): - self._fn = fn - - def __get__(self, instance, cls=None): - if self.result_key in vars(self): - return vars(self)[self.result_key] - else: - result = self._fn() - setattr(self, self.result_key, result) - return result - - -PRIMITIVE_TYPES = (list, float, int, bool, datetime, date, str, file_type) - - -def allows_single_value_input(cls): - """ - This function returns True if the input composed schema model or any - descendant model allows a value only input - This is true for cases where oneOf contains items like: - oneOf: - - float - - NumberWithValidation - - StringEnum - - ArrayModel - - null - TODO: lru_cache this - """ - if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: - return True - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas["oneOf"]: - return False - return any(allows_single_value_input(c) for c in cls._composed_schemas["oneOf"]) - return False - - -def composed_model_input_classes(cls): - """ - This function returns a list of the possible models that can be accepted as - inputs. - TODO: lru_cache this - """ - if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: - return [cls] - elif issubclass(cls, ModelNormal): - if cls.discriminator is None: - return [cls] - else: - return get_discriminated_classes(cls) - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas["oneOf"]: - return [] - if cls.discriminator is None: - input_classes = [] - for c in cls._composed_schemas["oneOf"]: - input_classes.extend(composed_model_input_classes(c)) - return input_classes - else: - return get_discriminated_classes(cls) - return [] - - -class OpenApiModel(object): - """The base class for all OpenAPIModels""" - - def set_attribute(self, name, value): - # this is only used to set properties on self - - path_to_item = [] - if self._path_to_item: - path_to_item.extend(self._path_to_item) - path_to_item.append(name) - - if name in self.openapi_types: - required_types_mixed = self.openapi_types[name] - elif self.additional_properties_type is None: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - path_to_item, - ) - elif self.additional_properties_type is not None: - required_types_mixed = self.additional_properties_type - - if get_simple_class(name) != str: - error_msg = type_error_message( - var_name=name, var_value=name, valid_classes=(str,), key_type=True - ) - raise ApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=(str,), - key_type=True, - ) - - if self._check_type: - value = validate_and_convert_types( - value, - required_types_mixed, - path_to_item, - self._spec_property_naming, - self._check_type, - configuration=self._configuration, - ) - if (name,) in self.allowed_values: - check_allowed_values(self.allowed_values, (name,), value) - if (name,) in self.validations: - check_validations(self.validations, (name,), value, self._configuration) - self.__dict__["_data_store"][name] = value - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() 
- - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other - - def __setattr__(self, attr, value): - """set the value of an attribute using dot notation: `instance.attr = val`""" - self[attr] = value - - def __getattr__(self, attr): - """get the value of an attribute using dot notation: `instance.attr`""" - return self.__getitem__(attr) - - def __copy__(self): - cls = self.__class__ - if self.get("_spec_property_naming", False): - return cls._new_from_openapi_data(**self.__dict__) - else: - return cls.__new__(cls, **self.__dict__) - - def __deepcopy__(self, memo): - cls = self.__class__ - - if self.get("_spec_property_naming", False): - new_inst = cls._new_from_openapi_data() - else: - new_inst = cls.__new__(cls, **self.__dict__) - - for k, v in self.__dict__.items(): - setattr(new_inst, k, deepcopy(v, memo)) - return new_inst - - def __new__(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. - return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance( - cls, model_kwargs, kwargs, model_arg=arg - ) - return oneof_instance - - visited_composed_classes = kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. During that process we traveled through - # this class but did not make an instance of it. Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. - # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. - # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return super(OpenApiModel, cls).__new__(cls) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. - path_to_item = kwargs.get("_path_to_item", ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" - % (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. 
get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get("_path_to_item", ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py) - ) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" - % (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return super(OpenApiModel, cls).__new__(cls) - - # Build a list containing all oneOf and anyOf descendants. - oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get( - "oneOf", () - ) + cls._composed_schemas.get("anyOf", ()) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get("allOf") and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = super(OpenApiModel, cls).__new__(cls) - self_inst.__init__(*args, **kwargs) - - if kwargs.get("_spec_property_naming", False): - # when true, implies new is from deserialization - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) - else: - new_inst = new_cls.__new__(new_cls, *args, **kwargs) - new_inst.__init__(*args, **kwargs) - - return new_inst - - @classmethod - @convert_js_args_to_python_args - def _new_from_openapi_data(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. - return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance( - cls, model_kwargs, kwargs, model_arg=arg - ) - return oneof_instance - - visited_composed_classes = kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. 
During that process we traveled through - # this class but did not make an instance of it. Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. - # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. - # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return cls._from_openapi_data(*args, **kwargs) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. - path_to_item = kwargs.get("_path_to_item", ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" - % (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get("_path_to_item", ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py) - ) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" - % (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return cls._from_openapi_data(*args, **kwargs) - - # Build a list containing all oneOf and anyOf descendants. 
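# For example (hypothetical schema names): if cls declares oneOf: [CatSchema, DogSchema]
# and the discriminator resolved new_cls to DogSchema, then oneof_anyof_classes is
# (CatSchema, DogSchema) and oneof_anyof_child below evaluates to True.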
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get( - "oneOf", () - ) + cls._composed_schemas.get("anyOf", ()) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get("allOf") and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = cls._from_openapi_data(*args, **kwargs) - - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) - return new_inst - - -class ModelSimple(OpenApiModel): - """the parent class of models whose type != object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__["_data_store"].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__["_data_store"] - - def to_str(self): - """Returns the string representation of the model""" - return str(self.value) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - this_val = self._data_store["value"] - that_val = other._data_store["value"] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - return vals_equal - - -class ModelNormal(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__["_data_store"].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__["_data_store"] - - def to_dict(self): - """Returns the model properties as a dict""" - return 
model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for _var_name, this_val in self._data_store.items(): - that_val = other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -class ModelComposed(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi and have oneOf/allOf/anyOf - - When one sets a property we use var_name_to_model_instances to store the value in - the correct class instances + run any type checking + validation code. - When one gets a property we use var_name_to_model_instances to get the value - from the correct class instances. - This allows multiple composed schemas to contain the same property with additive - constraints on the value. - - _composed_schemas (dict) stores the anyOf/allOf/oneOf classes - key (str): allOf/oneOf/anyOf - value (list): the classes in the XOf definition. - Note: none_type can be included when the openapi document version >= 3.1.0 - _composed_instances (list): stores a list of instances of the composed schemas - defined in _composed_schemas. When properties are accessed in the self instance, - they are returned from the self._data_store or the data stores in the instances - in self._composed_schemas - _var_name_to_model_instances (dict): maps between a variable name on self and - the composed instances (self included) which contain that data - key (str): property name - value (list): list of class instances, self or instances in _composed_instances - which contain the value that the key is referring to. - """ - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - """ - Use cases: - 1. additional_properties_type is None (additionalProperties == False in spec) - Check for property presence in self.openapi_types - if not present then throw an error - if present set in self, set attribute - always set on composed schemas - 2. 
additional_properties_type exists - set attribute on self - always set on composed schemas - """ - if self.additional_properties_type is None: - """ - For an attribute to exist on a composed schema it must: - - fulfill schema_requirements in the self composed schema not considering oneOf/anyOf/allOf schemas AND - - fulfill schema_requirements in each oneOf/anyOf/allOf schemas - - schema_requirements: - For an attribute to exist on a schema it must: - - be present in properties at the schema OR - - have additionalProperties unset (defaults additionalProperties = any type) OR - - have additionalProperties set - """ - if name not in self.openapi_types: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - # attribute must be set on self and composed instances - self.set_attribute(name, value) - for model_instance in self._composed_instances: - setattr(model_instance, name, value) - if name not in self._var_name_to_model_instances: - # we assigned an additional property - self.__dict__["_var_name_to_model_instances"][name] = ( - self._composed_instances + [self] - ) - return None - - __unset_attribute_value__ = object() - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - # get the attribute from the correct instance - model_instances = self._var_name_to_model_instances.get(name) - values = [] - # A composed model stores self and child (oneof/anyOf/allOf) models under - # self._var_name_to_model_instances. - # Any property must exist in self and all model instances - # The value stored in all model instances must be the same - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - v = model_instance._data_store[name] - if v not in values: - values.append(v) - len_values = len(values) - if len_values == 0: - return default - elif len_values == 1: - return values[0] - elif len_values > 1: - raise ApiValueError( - "Values stored for property {0} in {1} differ when looking " - "at self and self's composed instances. 
All values must be " - "the same".format(name, type(self).__name__), - [e for e in [self._path_to_item, name] if e], - ) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - value = self.get(name, self.__unset_attribute_value__) - if value is self.__unset_attribute_value__: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - return value - - def __contains__(self, name): - """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" - - if name in self.required_properties: - return name in self.__dict__ - - model_instances = self._var_name_to_model_instances.get( - name, self._additional_properties_model_instances - ) - - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - return True - - return False - - def to_dict(self): - """Returns the model properties as a dict""" - return model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for _var_name, this_val in self._data_store.items(): - that_val = other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -COERCION_INDEX_BY_TYPE = { - ModelComposed: 0, - ModelNormal: 1, - ModelSimple: 2, - none_type: 3, # The type of 'None'. - list: 4, - dict: 5, - float: 6, - int: 7, - bool: 8, - datetime: 9, - date: 10, - str: 11, - file_type: 12, # 'file_type' is an alias for the built-in 'file' or 'io.IOBase' type. -} - -# these are used to limit what type conversions we try to do -# when we have a valid type already and we want to try converting -# to another type -UPCONVERSION_TYPE_PAIRS = ( - (str, datetime), - (str, date), - # A float may be serialized as an integer, e.g. '3' is a valid serialized float. 
- (int, float), - (list, ModelComposed), - (dict, ModelComposed), - (str, ModelComposed), - (int, ModelComposed), - (float, ModelComposed), - (list, ModelComposed), - (list, ModelNormal), - (dict, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), -) - -COERCIBLE_TYPE_PAIRS = { - False: ( # client instantiation of a model with client data - # (dict, ModelComposed), - # (list, ModelComposed), - # (dict, ModelNormal), - # (list, ModelNormal), - # (str, ModelSimple), - # (int, ModelSimple), - # (float, ModelSimple), - # (list, ModelSimple), - # (str, int), - # (str, float), - # (str, datetime), - # (str, date), - # (int, str), - # (float, str), - ), - True: ( # server -> client data - (dict, ModelComposed), - (list, ModelComposed), - (dict, ModelNormal), - (list, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), - # (str, int), - # (str, float), - (str, datetime), - (str, date), - # (int, str), - # (float, str), - (str, file_type), - ), -} - - -def get_simple_class(input_value): - """Returns an input_value's simple class that we will use for type checking - Python2: - float and int will return int, where int is the python3 int backport - str and unicode will return str, where str is the python3 str backport - Note: float and int ARE both instances of int backport - Note: str_py2 and unicode_py2 are NOT both instances of str backport - - Args: - input_value (class/class_instance): the item for which we will return - the simple class - """ - if isinstance(input_value, type): - # input_value is a class - return input_value - elif isinstance(input_value, tuple): - return tuple - elif isinstance(input_value, list): - return list - elif isinstance(input_value, dict): - return dict - elif isinstance(input_value, none_type): - return none_type - elif isinstance(input_value, file_type): - return file_type - elif isinstance(input_value, bool): - # this must be higher than the int check because - # isinstance(True, int) == True - return bool - elif isinstance(input_value, int): - return int - elif isinstance(input_value, datetime): - # this must be higher than the date check because - # isinstance(datetime_instance, date) == True - return datetime - elif isinstance(input_value, date): - return date - elif isinstance(input_value, str): - return str - return type(input_value) - - -def check_allowed_values(allowed_values, input_variable_path, input_values): - """Raises an exception if the input_values are not allowed - - Args: - allowed_values (dict): the allowed_values dict - input_variable_path (tuple): the path to the input variable - input_values (list/str/int/float/date/datetime): the values that we - are checking to see if they are in allowed_values - """ - these_allowed_values = list(allowed_values[input_variable_path].values()) - if isinstance(input_values, list) and not set(input_values).issubset( - set(these_allowed_values) - ): - invalid_values = ( - ", ".join(map(str, set(input_values) - set(these_allowed_values))), - ) - raise ApiValueError( - "Invalid values for `%s` [%s], must be a subset of [%s]" - % ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)), - ) - ) - elif isinstance(input_values, dict) and not set(input_values.keys()).issubset( - set(these_allowed_values) - ): - invalid_values = ", ".join( - map(str, set(input_values.keys()) - set(these_allowed_values)) - ) - raise ApiValueError( - "Invalid keys in `%s` [%s], must be a subset of [%s]" 
- % ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)), - ) - ) - elif ( - not isinstance(input_values, (list, dict)) - and input_values not in these_allowed_values - ): - raise ApiValueError( - "Invalid value for `%s` (%s), must be one of %s" - % (input_variable_path[0], input_values, these_allowed_values) - ) - - -def is_json_validation_enabled(schema_keyword, configuration=None): - """Returns true if JSON schema validation is enabled for the specified - validation keyword. This can be used to skip JSON schema structural validation - as requested in the configuration. - - Args: - schema_keyword (string): the name of a JSON schema validation keyword. - configuration (Configuration): the configuration class. - """ - - return ( - configuration is None - or not hasattr(configuration, "_disabled_client_side_validations") - or schema_keyword not in configuration._disabled_client_side_validations - ) - - -def check_validations( - validations, input_variable_path, input_values, configuration=None -): - """Raises an exception if the input_values are invalid - - Args: - validations (dict): the validation dictionary. - input_variable_path (tuple): the path to the input variable. - input_values (list/str/int/float/date/datetime): the values that we - are checking. - configuration (Configuration): the configuration class. - """ - - if input_values is None: - return - - current_validations = validations[input_variable_path] - if ( - is_json_validation_enabled("multipleOf", configuration) - and "multiple_of" in current_validations - and isinstance(input_values, (int, float)) - and not (float(input_values) / current_validations["multiple_of"]).is_integer() - ): - # Note 'multipleOf' will be as good as the floating point arithmetic. 
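# For example, with multiple_of == 0.01 an input of 0.07 is rejected here even though
# it is mathematically a multiple: 0.07 / 0.01 evaluates to 7.000000000000001 under
# binary floating point, so .is_integer() returns False.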
- raise ApiValueError( - "Invalid value for `%s`, value must be a multiple of " - "`%s`" % (input_variable_path[0], current_validations["multiple_of"]) - ) - - if ( - is_json_validation_enabled("maxLength", configuration) - and "max_length" in current_validations - and len(input_values) > current_validations["max_length"] - ): - raise ApiValueError( - "Invalid value for `%s`, length must be less than or equal to " - "`%s`" % (input_variable_path[0], current_validations["max_length"]) - ) - - if ( - is_json_validation_enabled("minLength", configuration) - and "min_length" in current_validations - and len(input_values) < current_validations["min_length"] - ): - raise ApiValueError( - "Invalid value for `%s`, length must be greater than or equal to " - "`%s`" % (input_variable_path[0], current_validations["min_length"]) - ) - - if ( - is_json_validation_enabled("maxItems", configuration) - and "max_items" in current_validations - and len(input_values) > current_validations["max_items"] - ): - raise ApiValueError( - "Invalid value for `%s`, number of items must be less than or " - "equal to `%s`" % (input_variable_path[0], current_validations["max_items"]) - ) - - if ( - is_json_validation_enabled("minItems", configuration) - and "min_items" in current_validations - and len(input_values) < current_validations["min_items"] - ): - raise ValueError( - "Invalid value for `%s`, number of items must be greater than or " - "equal to `%s`" % (input_variable_path[0], current_validations["min_items"]) - ) - - items = ( - "exclusive_maximum", - "inclusive_maximum", - "exclusive_minimum", - "inclusive_minimum", - ) - if any(item in current_validations for item in items): - if isinstance(input_values, list): - max_val = max(input_values) - min_val = min(input_values) - elif isinstance(input_values, dict): - max_val = max(input_values.values()) - min_val = min(input_values.values()) - else: - max_val = input_values - min_val = input_values - - if ( - is_json_validation_enabled("exclusiveMaximum", configuration) - and "exclusive_maximum" in current_validations - and max_val >= current_validations["exclusive_maximum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than `%s`" - % (input_variable_path[0], current_validations["exclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("maximum", configuration) - and "inclusive_maximum" in current_validations - and max_val > current_validations["inclusive_maximum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than or equal to " - "`%s`" % (input_variable_path[0], current_validations["inclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("exclusiveMinimum", configuration) - and "exclusive_minimum" in current_validations - and min_val <= current_validations["exclusive_minimum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than `%s`" - % (input_variable_path[0], current_validations["exclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("minimum", configuration) - and "inclusive_minimum" in current_validations - and min_val < current_validations["inclusive_minimum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than or equal " - "to `%s`" - % (input_variable_path[0], current_validations["inclusive_minimum"]) - ) - flags = current_validations.get("regex", {}).get("flags", 0) - if ( - is_json_validation_enabled("pattern", configuration) - and "regex" in current_validations - and not re.search( - 
current_validations["regex"]["pattern"], input_values, flags=flags - ) - ): - err_msg = r"Invalid value for `%s`, must match regular expression `%s`" % ( - input_variable_path[0], - current_validations["regex"]["pattern"], - ) - if flags != 0: - # Don't print the regex flags if the flags are not - # specified in the OAS document. - err_msg = r"%s with flags=`%s`" % (err_msg, flags) - raise ApiValueError(err_msg) - - -def order_response_types(required_types): - """Returns the required types sorted in coercion order - - Args: - required_types (list/tuple): collection of classes or instance of - list or dict with class information inside it. - - Returns: - (list): coercion order sorted collection of classes or instance - of list or dict with class information inside it. - """ - - def index_getter(class_or_instance): - if isinstance(class_or_instance, list): - return COERCION_INDEX_BY_TYPE[list] - elif isinstance(class_or_instance, dict): - return COERCION_INDEX_BY_TYPE[dict] - elif inspect.isclass(class_or_instance) and issubclass( - class_or_instance, ModelComposed - ): - return COERCION_INDEX_BY_TYPE[ModelComposed] - elif inspect.isclass(class_or_instance) and issubclass( - class_or_instance, ModelNormal - ): - return COERCION_INDEX_BY_TYPE[ModelNormal] - elif inspect.isclass(class_or_instance) and issubclass( - class_or_instance, ModelSimple - ): - return COERCION_INDEX_BY_TYPE[ModelSimple] - elif class_or_instance in COERCION_INDEX_BY_TYPE: - return COERCION_INDEX_BY_TYPE[class_or_instance] - raise ApiValueError("Unsupported type: %s" % class_or_instance) - - sorted_types = sorted( - required_types, key=lambda class_or_instance: index_getter(class_or_instance) - ) - return sorted_types - - -def remove_uncoercible( - required_types_classes, current_item, spec_property_naming, must_convert=True -): - """Only keeps the type conversions that are possible - - Args: - required_types_classes (tuple): tuple of classes that are required - these should be ordered by COERCION_INDEX_BY_TYPE - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. 
- current_item (any): the current item (input data) to be converted - - Keyword Args: - must_convert (bool): if True the item to convert is of the wrong - type and we want a big list of coercibles - if False, we want a limited list of coercibles - - Returns: - (list): the remaining coercible required types, classes only - """ - current_type_simple = get_simple_class(current_item) - - results_classes = [] - for required_type_class in required_types_classes: - # convert our models to OpenApiModel - required_type_class_simplified = required_type_class - if isinstance(required_type_class_simplified, type): - if issubclass(required_type_class_simplified, ModelComposed): - required_type_class_simplified = ModelComposed - elif issubclass(required_type_class_simplified, ModelNormal): - required_type_class_simplified = ModelNormal - elif issubclass(required_type_class_simplified, ModelSimple): - required_type_class_simplified = ModelSimple - - if required_type_class_simplified == current_type_simple: - # don't consider converting to one's own class - continue - - class_pair = (current_type_simple, required_type_class_simplified) - if must_convert and class_pair in COERCIBLE_TYPE_PAIRS[spec_property_naming]: - results_classes.append(required_type_class) - elif class_pair in UPCONVERSION_TYPE_PAIRS: - results_classes.append(required_type_class) - return results_classes - - -def get_discriminated_classes(cls): - """ - Returns all the classes that a discriminator converts to - TODO: lru_cache this - """ - possible_classes = [] - key = list(cls.discriminator.keys())[0] - if is_type_nullable(cls): - possible_classes.append(cls) - for discr_cls in cls.discriminator[key].values(): - if hasattr(discr_cls, "discriminator") and discr_cls.discriminator is not None: - possible_classes.extend(get_discriminated_classes(discr_cls)) - else: - possible_classes.append(discr_cls) - return possible_classes - - -def get_possible_classes(cls, from_server_context): - # TODO: lru_cache this - possible_classes = [cls] - if from_server_context: - return possible_classes - if hasattr(cls, "discriminator") and cls.discriminator is not None: - possible_classes = [] - possible_classes.extend(get_discriminated_classes(cls)) - elif issubclass(cls, ModelComposed): - possible_classes.extend(composed_model_input_classes(cls)) - return possible_classes - - -def get_required_type_classes(required_types_mixed, spec_property_naming): - """Converts the tuple required_types into a tuple and a dict described - below - - Args: - required_types_mixed (tuple/list): will contain either classes or - instance of list or dict - spec_property_naming (bool): if True these values came from the - server, and we use the data types in our endpoints. 
- If False, we are client side and we need to include - oneOf and discriminator classes inside the data types in our endpoints - - Returns: - (valid_classes, dict_valid_class_to_child_types_mixed): - valid_classes (tuple): the valid classes that the current item - should be - dict_valid_class_to_child_types_mixed (dict): - valid_class (class): this is the key - child_types_mixed (list/dict/tuple): describes the valid child - types - """ - valid_classes = [] - child_req_types_by_current_type = {} - for required_type in required_types_mixed: - if isinstance(required_type, list): - valid_classes.append(list) - child_req_types_by_current_type[list] = required_type - elif isinstance(required_type, tuple): - valid_classes.append(tuple) - child_req_types_by_current_type[tuple] = required_type - elif isinstance(required_type, dict): - valid_classes.append(dict) - child_req_types_by_current_type[dict] = required_type[str] - else: - valid_classes.extend( - get_possible_classes(required_type, spec_property_naming) - ) - return tuple(valid_classes), child_req_types_by_current_type - - -def change_keys_js_to_python(input_dict, model_class): - """ - Converts from javascript_key keys in the input_dict to python_keys in - the output dict using the mapping in model_class. - If the input_dict contains a key which does not declared in the model_class, - the key is added to the output dict as is. The assumption is the model_class - may have undeclared properties (additionalProperties attribute in the OAS - document). - """ - - if getattr(model_class, "attribute_map", None) is None: - return input_dict - output_dict = {} - reversed_attr_map = {value: key for key, value in model_class.attribute_map.items()} - for javascript_key, value in input_dict.items(): - python_key = reversed_attr_map.get(javascript_key) - if python_key is None: - # if the key is unknown, it is in error or it is an - # additionalProperties variable - python_key = javascript_key - output_dict[python_key] = value - return output_dict - - -def get_type_error(var_value, path_to_item, valid_classes, key_type=False): - error_msg = type_error_message( - var_name=path_to_item[-1], - var_value=var_value, - valid_classes=valid_classes, - key_type=key_type, - ) - return ApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=valid_classes, - key_type=key_type, - ) - - -def deserialize_primitive(data, klass, path_to_item): - """Deserializes string to primitive type. - - :param data: str/int/float - :param klass: str/class the class to convert to - - :return: int, float, str, bool, date, datetime - """ - additional_message = "" - try: - if klass in {datetime, date}: - additional_message = ( - "If you need your parameter to have a fallback " - "string value, please set its type as `type: {}` in your " - "spec. That allows the value to be any type. " - ) - if klass == datetime: - if len(data) < 8: - raise ValueError("This is not a datetime") - # The string should be in iso8601 datetime format. 
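# For example, "2020-01-01T12:30:00Z" parses to a timezone-aware datetime and is
# returned below, while a bare date string such as "2020-01-01" (midnight, no tzinfo,
# 8-10 characters) is caught by the date_only check and rejected for the datetime type.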
- parsed_datetime = parse(data) - date_only = ( - parsed_datetime.hour == 0 - and parsed_datetime.minute == 0 - and parsed_datetime.second == 0 - and parsed_datetime.tzinfo is None - and 8 <= len(data) <= 10 - ) - if date_only: - raise ValueError("This is a date, not a datetime") - return parsed_datetime - elif klass == date: - if len(data) < 8: - raise ValueError("This is not a date") - return parse(data).date() - else: - converted_value = klass(data) - if isinstance(data, str) and klass == float: - if str(converted_value) != data: - # '7' -> 7.0 -> '7.0' != '7' - raise ValueError("This is not a float") - return converted_value - except (OverflowError, ValueError) as ex: - # parse can raise OverflowError - raise ApiValueError( - "{0}Failed to parse {1} as {2}".format( - additional_message, repr(data), klass.__name__ - ), - path_to_item=path_to_item, - ) from ex - - -def get_discriminator_class(model_class, discr_name, discr_value, cls_visited): - """Returns the child class specified by the discriminator. - - Args: - model_class (OpenApiModel): the model class. - discr_name (string): the name of the discriminator property. - discr_value (any): the discriminator value. - cls_visited (list): list of model classes that have been visited. - Used to determine the discriminator class without - visiting circular references indefinitely. - - Returns: - used_model_class (class/None): the chosen child class that will be used - to deserialize the data, for example dog.Dog. - If a class is not found, None is returned. - """ - - if model_class in cls_visited: - # The class has already been visited and no suitable class was found. - return None - cls_visited.append(model_class) - used_model_class = None - if discr_name in model_class.discriminator: - class_name_to_discr_class = model_class.discriminator[discr_name] - used_model_class = class_name_to_discr_class.get(discr_value) - if used_model_class is None: - # We didn't find a discriminated class in class_name_to_discr_class. - # So look in the ancestor or descendant discriminators - # The discriminator mapping may exist in a descendant (anyOf, oneOf) - # or ancestor (allOf). - # Ancestor example: in the GrandparentAnimal -> ParentPet -> ChildCat - # hierarchy, the discriminator mappings may be defined at any level - # in the hierarchy. - # Descendant example: mammal -> whale/zebra/Pig -> BasquePig/DanishPig - # if we try to make BasquePig from mammal, we need to travel through - # the oneOf descendant discriminators to find BasquePig - descendant_classes = model_class._composed_schemas.get( - "oneOf", () - ) + model_class._composed_schemas.get("anyOf", ()) - ancestor_classes = model_class._composed_schemas.get("allOf", ()) - possible_classes = descendant_classes + ancestor_classes - for cls in possible_classes: - # Check if the schema has inherited discriminators. - if hasattr(cls, "discriminator") and cls.discriminator is not None: - used_model_class = get_discriminator_class( - cls, discr_name, discr_value, cls_visited - ) - if used_model_class is not None: - return used_model_class - return used_model_class - - -def deserialize_model( - model_data, - model_class, - path_to_item, - check_type, - configuration, - spec_property_naming, -): - """Deserializes model_data to model instance. 
- - Args: - model_data (int/str/float/bool/none_type/list/dict): data to instantiate the model - model_class (OpenApiModel): the model class - path_to_item (list): path to the model in the received data - check_type (bool): whether to check the data tupe for the values in - the model - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - - Returns: - model instance - - Raise: - ApiTypeError - ApiValueError - ApiKeyError - """ - - kw_args = dict( - _check_type=check_type, - _path_to_item=path_to_item, - _configuration=configuration, - _spec_property_naming=spec_property_naming, - ) - - if issubclass(model_class, ModelSimple): - return model_class._new_from_openapi_data(model_data, **kw_args) - elif isinstance(model_data, list): - return model_class._new_from_openapi_data(*model_data, **kw_args) - if isinstance(model_data, dict): - kw_args.update(model_data) - return model_class._new_from_openapi_data(**kw_args) - elif isinstance(model_data, PRIMITIVE_TYPES): - return model_class._new_from_openapi_data(model_data, **kw_args) - - -def deserialize_file(response_data, configuration, content_disposition=None): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. - - Args: - param response_data (str): the file data to write - configuration (Configuration): the instance to use to convert files - - Keyword Args: - content_disposition (str): the value of the Content-Disposition - header - - Returns: - (file_type): the deserialized file which is open - The user is responsible for closing and reading the file - """ - fd, path = tempfile.mkstemp(dir=configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - if content_disposition: - filename = re.search( - r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition, flags=re.I - ) - if filename is not None: - filename = filename.group(1) - else: - filename = "default_" + str(uuid.uuid4()) - - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - if isinstance(response_data, str): - # change str to bytes so we can write it - response_data = response_data.encode("utf-8") - f.write(response_data) - - f = open(path, "rb") - return f - - -def attempt_convert_item( - input_value, - valid_classes, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=False, - check_type=True, -): - """ - Args: - input_value (any): the data to convert - valid_classes (any): the classes that are valid - path_to_item (list): the path to the item to convert - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. 
- key_type (bool): if True we need to convert a key type (not supported) - must_convert (bool): if True we must convert - check_type (bool): if True we check the type or the returned data in - ModelComposed/ModelNormal/ModelSimple instances - - Returns: - instance (any) the fixed item - - Raises: - ApiTypeError - ApiValueError - ApiKeyError - """ - valid_classes_ordered = order_response_types(valid_classes) - valid_classes_coercible = remove_uncoercible( - valid_classes_ordered, input_value, spec_property_naming - ) - if not valid_classes_coercible or key_type: - # we do not handle keytype errors, json will take care - # of this for us - if configuration is None or not configuration.discard_unknown_keys: - raise get_type_error( - input_value, path_to_item, valid_classes, key_type=key_type - ) - for valid_class in valid_classes_coercible: - try: - if issubclass(valid_class, OpenApiModel): - return deserialize_model( - input_value, - valid_class, - path_to_item, - check_type, - configuration, - spec_property_naming, - ) - elif valid_class == file_type: - return deserialize_file(input_value, configuration) - return deserialize_primitive(input_value, valid_class, path_to_item) - except (ApiTypeError, ApiValueError, ApiKeyError) as conversion_exc: - if must_convert: - raise conversion_exc - # if we have conversion errors when must_convert == False - # we ignore the exception and move on to the next class - continue - # we were unable to convert, must_convert == False - return input_value - - -def is_type_nullable(input_type): - """ - Returns true if None is an allowed value for the specified input_type. - - A type is nullable if at least one of the following conditions is true: - 1. The OAS 'nullable' attribute has been specified, - 1. The type is the 'null' type, - 1. The type is a anyOf/oneOf composed schema, and a child schema is - the 'null' type. - Args: - input_type (type): the class of the input_value that we are - checking - Returns: - bool - """ - if input_type is none_type: - return True - if issubclass(input_type, OpenApiModel) and input_type._nullable: - return True - if issubclass(input_type, ModelComposed): - # If oneOf/anyOf, check if the 'null' type is one of the allowed types. - for t in input_type._composed_schemas.get("oneOf", ()): - if is_type_nullable(t): - return True - for t in input_type._composed_schemas.get("anyOf", ()): - if is_type_nullable(t): - return True - return False - - -def is_valid_type(input_class_simple, valid_classes): - """ - Args: - input_class_simple (class): the class of the input_value that we are - checking - valid_classes (tuple): the valid classes that the current item - should be - Returns: - bool - """ - if issubclass(input_class_simple, OpenApiModel) and valid_classes == ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ): - return True - valid_type = input_class_simple in valid_classes - if not valid_type and ( - issubclass(input_class_simple, OpenApiModel) or input_class_simple is none_type - ): - for valid_class in valid_classes: - if input_class_simple is none_type and is_type_nullable(valid_class): - # Schema is oneOf/anyOf and the 'null' type is one of the allowed types. 
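# For example, a None input checked against a composed schema declared as
# oneOf: [SomeModel, 'null'] (hypothetical) satisfies is_type_nullable and is
# accepted as a valid value here.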
- return True - if not ( - issubclass(valid_class, OpenApiModel) and valid_class.discriminator - ): - continue - discr_propertyname_py = list(valid_class.discriminator.keys())[0] - discriminator_classes = valid_class.discriminator[ - discr_propertyname_py - ].values() - valid_type = is_valid_type(input_class_simple, discriminator_classes) - if valid_type: - return True - return valid_type - - -def validate_and_convert_types( - input_value, - required_types_mixed, - path_to_item, - spec_property_naming, - _check_type, - configuration=None, -): - """Raises a TypeError is there is a problem, otherwise returns value - - Args: - input_value (any): the data to validate/convert - required_types_mixed (list/dict/tuple): A list of - valid classes, or a list tuples of valid classes, or a dict where - the value is a tuple of value classes - path_to_item: (list) the path to the data being validated - this stores a list of keys or indices to get to the data being - validated - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - _check_type: (boolean) if true, type will be checked and conversion - will be attempted. - configuration: (Configuration): the configuration class to use - when converting file_type items. - If passed, conversion will be attempted when possible - If not passed, no conversions will be attempted and - exceptions will be raised - - Returns: - the correctly typed value - - Raises: - ApiTypeError - """ - results = get_required_type_classes(required_types_mixed, spec_property_naming) - valid_classes, child_req_types_by_current_type = results - - input_class_simple = get_simple_class(input_value) - valid_type = is_valid_type(input_class_simple, valid_classes) - if not valid_type: - if configuration or (input_class_simple == dict and dict not in valid_classes): - # if input_value is not valid_type try to convert it - converted_instance = attempt_convert_item( - input_value, - valid_classes, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=True, - check_type=_check_type, - ) - return converted_instance - else: - raise get_type_error( - input_value, path_to_item, valid_classes, key_type=False - ) - - # input_value's type is in valid_classes - if len(valid_classes) > 1 and configuration: - # there are valid classes which are not the current class - valid_classes_coercible = remove_uncoercible( - valid_classes, input_value, spec_property_naming, must_convert=False - ) - if valid_classes_coercible: - converted_instance = attempt_convert_item( - input_value, - valid_classes_coercible, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=False, - check_type=_check_type, - ) - return converted_instance - - if child_req_types_by_current_type == {}: - # all types are of the required types and there are no more inner - # variables left to look at - return input_value - inner_required_types = child_req_types_by_current_type.get(type(input_value)) - if inner_required_types is None: - # for this type, there are not more inner variables left to look at - return input_value - if isinstance(input_value, list): - if input_value == []: - # allow an empty list - return input_value - for index, inner_value in enumerate(input_value): - inner_path = list(path_to_item) - inner_path.append(index) - input_value[index] = validate_and_convert_types( - 
inner_value, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration, - ) - elif isinstance(input_value, dict): - if input_value == {}: - # allow an empty dict - return input_value - for inner_key, inner_val in input_value.items(): - inner_path = list(path_to_item) - inner_path.append(inner_key) - if get_simple_class(inner_key) != str: - raise get_type_error( - inner_key, inner_path, valid_classes, key_type=True - ) - input_value[inner_key] = validate_and_convert_types( - inner_val, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration, - ) - return input_value - - -def model_to_dict(model_instance, serialize=True): - """Returns the model properties as a dict - - Args: - model_instance (one of your model instances): the model instance that - will be converted to a dict. - - Keyword Args: - serialize (bool): if True, the keys in the dict will be values from - attribute_map - """ - result = {} - - def extract_item(item): - return ( - (item[0], model_to_dict(item[1], serialize=serialize)) - if hasattr(item[1], "_data_store") - else item - ) - - model_instances = [model_instance] - if model_instance._composed_schemas: - model_instances.extend(model_instance._composed_instances) - seen_json_attribute_names = set() - used_fallback_python_attribute_names = set() - py_to_json_map = {} - for model_instance in model_instances: - for attr, value in model_instance._data_store.items(): - if serialize: - # we use get here because additional property key names do not - # exist in attribute_map - try: - attr = model_instance.attribute_map[attr] - py_to_json_map.update(model_instance.attribute_map) - seen_json_attribute_names.add(attr) - except KeyError: - used_fallback_python_attribute_names.add(attr) - if isinstance(value, list): - if not value: - # empty list or None - result[attr] = value - else: - res = [] - for v in value: - if isinstance(v, PRIMITIVE_TYPES) or v is None: - res.append(v) - elif isinstance(v, ModelSimple): - res.append(v.value) - elif isinstance(v, dict): - res.append(dict(map(extract_item, v.items()))) - else: - res.append(model_to_dict(v, serialize=serialize)) - result[attr] = res - elif isinstance(value, dict): - result[attr] = dict(map(extract_item, value.items())) - elif isinstance(value, ModelSimple): - result[attr] = value.value - elif hasattr(value, "_data_store"): - result[attr] = model_to_dict(value, serialize=serialize) - else: - result[attr] = value - if serialize: - for python_key in used_fallback_python_attribute_names: - json_key = py_to_json_map.get(python_key) - if json_key is None: - continue - if python_key == json_key: - continue - json_key_assigned_no_need_for_python_key = ( - json_key in seen_json_attribute_names - ) - if json_key_assigned_no_need_for_python_key: - del result[python_key] - - return result - - -def type_error_message( - var_value=None, var_name=None, valid_classes=None, key_type=None -): - """ - Keyword Args: - var_value (any): the variable which has the type_error - var_name (str): the name of the variable which has the typ error - valid_classes (tuple): the accepted classes for current_item's - value - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - """ - key_or_value = "value" - if key_type: - key_or_value = "key" - valid_classes_phrase = get_valid_classes_phrase(valid_classes) - msg = ( - "Invalid type for variable '{0}'. 
Required {1} type {2} and " - "passed type was {3}".format( - var_name, - key_or_value, - valid_classes_phrase, - type(var_value).__name__, - ) - ) - return msg - - -def get_valid_classes_phrase(input_classes): - """Returns a string phrase describing what types are allowed""" - all_classes = list(input_classes) - all_classes = sorted(all_classes, key=lambda cls: cls.__name__) - all_class_names = [cls.__name__ for cls in all_classes] - if len(all_class_names) == 1: - return "is {0}".format(all_class_names[0]) - return "is one of [{0}]".format(", ".join(all_class_names)) - - -def get_allof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - used to make instances - constant_args (dict): - metadata arguments: - _check_type - _path_to_item - _spec_property_naming - _configuration - _visited_composed_classes - - Returns - composed_instances (list) - """ - composed_instances = [] - for allof_class in self._composed_schemas["allOf"]: - - try: - if constant_args.get("_spec_property_naming"): - allof_instance = allof_class._from_openapi_data( - **model_args, **constant_args - ) - else: - allof_instance = allof_class(**model_args, **constant_args) - composed_instances.append(allof_instance) - except Exception as ex: - raise ApiValueError( - "Invalid inputs given to generate an instance of '%s'. The " - "input data was invalid for the allOf schema '%s' in the composed " - "schema '%s'. Error=%s" - % ( - allof_class.__name__, - allof_class.__name__, - self.__class__.__name__, - str(ex), - ) - ) from ex - return composed_instances - - -def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): - """ - Find the oneOf schema that matches the input data (e.g. payload). - If exactly one schema matches the input data, an instance of that schema - is returned. - If zero or more than one schema match the input data, an exception is raised. - In OAS 3.x, the payload MUST, by validation, match exactly one of the - schemas described by oneOf. - - Args: - cls: the class we are handling - model_kwargs (dict): var_name to var_value - The input data, e.g. the payload that must match a oneOf schema - in the OpenAPI document. - constant_kwargs (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. - - Kwargs: - model_arg: (int, float, bool, str, date, datetime, ModelSimple, None): - the value to assign to a primitive class or ModelSimple class - Notes: - - this is only passed in when oneOf includes types which are not object - - None is used to suppress handling of model_arg, nullable models are handled in __new__ - - Returns - oneof_instance (instance) - """ - if len(cls._composed_schemas["oneOf"]) == 0: - return None - - oneof_instances = [] - # Iterate over each oneOf schema and determine if the input data - # matches the oneOf schemas. - for oneof_class in cls._composed_schemas["oneOf"]: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if oneof_class is none_type: - # skip none_types because we are deserializing dict data. 
- # none_type deserialization is handled in the __new__ method - continue - - single_value_input = allows_single_value_input(oneof_class) - - try: - if not single_value_input: - if constant_kwargs.get("_spec_property_naming"): - oneof_instance = oneof_class._from_openapi_data( - **model_kwargs, **constant_kwargs - ) - else: - oneof_instance = oneof_class(**model_kwargs, **constant_kwargs) - else: - if issubclass(oneof_class, ModelSimple): - if constant_kwargs.get("_spec_property_naming"): - oneof_instance = oneof_class._from_openapi_data( - model_arg, **constant_kwargs - ) - else: - oneof_instance = oneof_class(model_arg, **constant_kwargs) - elif oneof_class in PRIMITIVE_TYPES: - oneof_instance = validate_and_convert_types( - model_arg, - (oneof_class,), - constant_kwargs["_path_to_item"], - constant_kwargs["_spec_property_naming"], - constant_kwargs["_check_type"], - configuration=constant_kwargs["_configuration"], - ) - oneof_instances.append(oneof_instance) - except Exception: - pass - if len(oneof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None " - "of the oneOf schemas matched the input data." % cls.__name__ - ) - elif len(oneof_instances) > 1: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. Multiple " - "oneOf schemas matched the inputs, but a max of one is allowed." - % cls.__name__ - ) - return oneof_instances[0] - - -def get_anyof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - The input data, e.g. the payload that must match at least one - anyOf child schema in the OpenAPI document. - constant_args (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. - - Returns - anyof_instances (list) - """ - anyof_instances = [] - if len(self._composed_schemas["anyOf"]) == 0: - return anyof_instances - - for anyof_class in self._composed_schemas["anyOf"]: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if anyof_class is none_type: - # skip none_types because we are deserializing dict data. - # none_type deserialization is handled in the __new__ method - continue - - try: - if constant_args.get("_spec_property_naming"): - anyof_instance = anyof_class._from_openapi_data( - **model_args, **constant_args - ) - else: - anyof_instance = anyof_class(**model_args, **constant_args) - anyof_instances.append(anyof_instance) - except Exception: - pass - if len(anyof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None of the " - "anyOf schemas matched the inputs." 
% self.__class__.__name__ - ) - return anyof_instances - - -def get_discarded_args(self, composed_instances, model_args): - """ - Gathers the args that were discarded by configuration.discard_unknown_keys - """ - model_arg_keys = model_args.keys() - discarded_args = set() - # arguments passed to self were already converted to python names - # before __init__ was called - for instance in composed_instances: - if instance.__class__ in self._composed_schemas["allOf"]: - try: - keys = instance.to_dict().keys() - discarded_keys = model_args - keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - else: - try: - all_keys = set(model_to_dict(instance, serialize=False).keys()) - js_keys = model_to_dict(instance, serialize=True).keys() - all_keys.update(js_keys) - discarded_keys = model_arg_keys - all_keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - return discarded_args - - -def validate_get_composed_info(constant_args, model_args, self): - """ - For composed schemas, generate schema instances for - all schemas in the oneOf/anyOf/allOf definition. If additional - properties are allowed, also assign those properties on - all matched schemas that contain additionalProperties. - Openapi schemas are python classes. - - Exceptions are raised if: - - 0 or > 1 oneOf schema matches the model_args input data - - no anyOf schema matches the model_args input data - - any of the allOf schemas do not match the model_args input data - - Args: - constant_args (dict): these are the args that every model requires - model_args (dict): these are the required and optional spec args that - were passed in to make this model - self (class): the class that we are instantiating - This class contains self._composed_schemas - - Returns: - composed_info (list): length three - composed_instances (list): the composed instances which are not - self - var_name_to_model_instances (dict): a dict going from var_name - to the model_instance which holds that var_name - the model_instance may be self or an instance of one of the - classes in self.composed_instances() - additional_properties_model_instances (list): a list of the - model instances which have the property - additional_properties_type. 
This list can include self - """ - # create composed_instances - composed_instances = [] - allof_instances = get_allof_instances(self, model_args, constant_args) - composed_instances.extend(allof_instances) - oneof_instance = get_oneof_instance(self.__class__, model_args, constant_args) - if oneof_instance is not None: - composed_instances.append(oneof_instance) - anyof_instances = get_anyof_instances(self, model_args, constant_args) - composed_instances.extend(anyof_instances) - """ - set additional_properties_model_instances - additional properties must be evaluated at the schema level - so self's additional properties are most important - If self is a composed schema with: - - no properties defined in self - - additionalProperties: False - Then for object payloads every property is an additional property - and they are not allowed, so only empty dict is allowed - - Properties must be set on all matching schemas - so when a property is assigned toa composed instance, it must be set on all - composed instances regardless of additionalProperties presence - keeping it to prevent breaking changes in v5.0.1 - TODO remove cls._additional_properties_model_instances in 6.0.0 - """ - additional_properties_model_instances = [] - if self.additional_properties_type is not None: - additional_properties_model_instances = [self] - - """ - no need to set properties on self in here, they will be set in __init__ - By here all composed schema oneOf/anyOf/allOf instances have their properties set using - model_args - """ - discarded_args = get_discarded_args(self, composed_instances, model_args) - - # map variable names to composed_instances - var_name_to_model_instances = {} - for prop_name in model_args: - if prop_name not in discarded_args: - var_name_to_model_instances[prop_name] = [self] + list( - filter(lambda x: prop_name in x.openapi_types, composed_instances) - ) - - return [ - composed_instances, - var_name_to_model_instances, - additional_properties_model_instances, - discarded_args, - ] diff --git a/libs/schematic/api-client-python/schematic_client/models/__init__.py b/libs/schematic/api-client-python/schematic_client/models/__init__.py deleted file mode 100644 index 5904a82fa..000000000 --- a/libs/schematic/api-client-python/schematic_client/models/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# flake8: noqa - -# import all models into this package -# if you have many models here with many references from one model to another this may -# raise a RecursionError -# to avoid this, import only the models that you directly need like: -# from from schematic_client.model.pet import Pet -# or import this package, but before doing it, use: -# import sys -# sys.setrecursionlimit(n) - -from schematic_client.model.basic_error import BasicError -from schematic_client.model.dataset import Dataset -from schematic_client.model.datasets_page import DatasetsPage -from schematic_client.model.datasets_page_all_of import DatasetsPageAllOf -from schematic_client.model.page_metadata import PageMetadata diff --git a/libs/schematic/api-client-python/schematic_client/rest.py b/libs/schematic/api-client-python/schematic_client/rest.py deleted file mode 100644 index 195555d50..000000000 --- a/libs/schematic/api-client-python/schematic_client/rest.py +++ /dev/null @@ -1,451 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech 
-""" - -import io -import json -import logging -import re -import ssl -from urllib.parse import urlencode -from urllib.parse import urlparse -from urllib.request import proxy_bypass_environment -import urllib3 -import ipaddress - -from schematic_client.exceptions import ( - ApiException, - UnauthorizedException, - ForbiddenException, - NotFoundException, - ServiceException, - ApiValueError, -) - - -logger = logging.getLogger(__name__) - - -class RESTResponse(io.IOBase): - - def __init__(self, resp): - self.urllib3_response = resp - self.status = resp.status - self.reason = resp.reason - self.data = resp.data - - def getheaders(self): - """Returns a dictionary of the response headers.""" - return self.urllib3_response.getheaders() - - def getheader(self, name, default=None): - """Returns a given response header.""" - return self.urllib3_response.getheader(name, default) - - -class RESTClientObject(object): - - def __init__(self, configuration, pools_size=4, maxsize=None): - # urllib3.PoolManager will pass all kw parameters to connectionpool - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 - # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 - - # cert_reqs - if configuration.verify_ssl: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - addition_pool_args = {} - if configuration.assert_hostname is not None: - addition_pool_args["assert_hostname"] = ( - configuration.assert_hostname - ) # noqa: E501 - - if configuration.retries is not None: - addition_pool_args["retries"] = configuration.retries - - if configuration.socket_options is not None: - addition_pool_args["socket_options"] = configuration.socket_options - - if maxsize is None: - if configuration.connection_pool_maxsize is not None: - maxsize = configuration.connection_pool_maxsize - else: - maxsize = 4 - - # https pool manager - if configuration.proxy and not should_bypass_proxies( - configuration.host, no_proxy=configuration.no_proxy or "" - ): - self.pool_manager = urllib3.ProxyManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - proxy_url=configuration.proxy, - proxy_headers=configuration.proxy_headers, - **addition_pool_args - ) - else: - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=configuration.ssl_ca_cert, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - **addition_pool_args - ) - - def request( - self, - method, - url, - query_params=None, - headers=None, - body=None, - post_params=None, - _preload_content=True, - _request_timeout=None, - ): - """Perform requests. - - :param method: http request method - :param url: http request url - :param query_params: query parameters in the url - :param headers: http request headers - :param body: request json body, for `application/json` - :param post_params: request post parameters, - `application/x-www-form-urlencoded` - and `multipart/form-data` - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. 
Default is True. - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - """ - method = method.upper() - assert method in ["GET", "HEAD", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"] - - if post_params and body: - raise ApiValueError( - "body parameter cannot be used with post_params parameter." - ) - - post_params = post_params or {} - headers = headers or {} - - timeout = None - if _request_timeout: - if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 - timeout = urllib3.Timeout(total=_request_timeout) - elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2: - timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1] - ) - - try: - # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]: - # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests - if (method != "DELETE") and ("Content-Type" not in headers): - headers["Content-Type"] = "application/json" - if query_params: - url += "?" + urlencode(query_params) - if ("Content-Type" not in headers) or ( - re.search("json", headers["Content-Type"], re.IGNORECASE) - ): - request_body = None - if body is not None: - request_body = json.dumps(body) - r = self.pool_manager.request( - method, - url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers, - ) - elif ( - headers["Content-Type"] == "application/x-www-form-urlencoded" - ): # noqa: E501 - r = self.pool_manager.request( - method, - url, - fields=post_params, - encode_multipart=False, - preload_content=_preload_content, - timeout=timeout, - headers=headers, - ) - elif headers["Content-Type"] == "multipart/form-data": - # must del headers['Content-Type'], or the correct - # Content-Type which generated by urllib3 will be - # overwritten. - del headers["Content-Type"] - r = self.pool_manager.request( - method, - url, - fields=post_params, - encode_multipart=True, - preload_content=_preload_content, - timeout=timeout, - headers=headers, - ) - # Pass a `string` parameter directly in the body to support - # other content types than Json when `body` argument is - # provided in serialized form - elif isinstance(body, str) or isinstance(body, bytes): - request_body = body - r = self.pool_manager.request( - method, - url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers, - ) - else: - # Cannot generate the request from given parameters - msg = """Cannot prepare a request message for provided - arguments. 
Please check that your arguments match - declared content type.""" - raise ApiException(status=0, reason=msg) - # For `GET`, `HEAD` - else: - r = self.pool_manager.request( - method, - url, - fields=query_params, - preload_content=_preload_content, - timeout=timeout, - headers=headers, - ) - except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) - raise ApiException(status=0, reason=msg) - - if _preload_content: - r = RESTResponse(r) - - # log response body - logger.debug("response body: %s", r.data) - - if not 200 <= r.status <= 299: - if r.status == 401: - raise UnauthorizedException(http_resp=r) - - if r.status == 403: - raise ForbiddenException(http_resp=r) - - if r.status == 404: - raise NotFoundException(http_resp=r) - - if 500 <= r.status <= 599: - raise ServiceException(http_resp=r) - - raise ApiException(http_resp=r) - - return r - - def GET( - self, - url, - headers=None, - query_params=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "GET", - url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params, - ) - - def HEAD( - self, - url, - headers=None, - query_params=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "HEAD", - url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params, - ) - - def OPTIONS( - self, - url, - headers=None, - query_params=None, - post_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "OPTIONS", - url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - - def DELETE( - self, - url, - headers=None, - query_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "DELETE", - url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - - def POST( - self, - url, - headers=None, - query_params=None, - post_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "POST", - url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - - def PUT( - self, - url, - headers=None, - query_params=None, - post_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "PUT", - url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - - def PATCH( - self, - url, - headers=None, - query_params=None, - post_params=None, - body=None, - _preload_content=True, - _request_timeout=None, - ): - return self.request( - "PATCH", - url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body, - ) - - -# end of class RESTClientObject - - -def is_ipv4(target): - """Test if IPv4 address or not""" - try: - chk = ipaddress.IPv4Address(target) - return True - except ipaddress.AddressValueError: - return False - - -def in_ipv4net(target, net): - """Test if target belongs to given IPv4 network""" - try: - nw = 
ipaddress.IPv4Network(net) - ip = ipaddress.IPv4Address(target) - if ip in nw: - return True - return False - except ipaddress.AddressValueError: - return False - except ipaddress.NetmaskValueError: - return False - - -def should_bypass_proxies(url, no_proxy=None): - """Yet another requests.should_bypass_proxies - Test if proxies should not be used for a particular url. - """ - - parsed = urlparse(url) - - # special cases - if parsed.hostname in [None, ""]: - return True - - # special cases - if no_proxy in [None, ""]: - return False - if no_proxy == "*": - return True - - no_proxy = no_proxy.lower().replace(" ", "") - entries = (host for host in no_proxy.split(",") if host) - - if is_ipv4(parsed.hostname): - for item in entries: - if in_ipv4net(parsed.hostname, item): - return True - return proxy_bypass_environment(parsed.hostname, {"no": no_proxy}) diff --git a/libs/schematic/api-client-python/setup.py b/libs/schematic/api-client-python/setup.py deleted file mode 100644 index 21d092b06..000000000 --- a/libs/schematic/api-client-python/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -from setuptools import setup, find_packages # noqa: H301 - -NAME = "schematic-client" -VERSION = "1.0.0" -# To install the library, run the following -# -# python setup.py install -# -# prerequisite: setuptools -# http://pypi.python.org/pypi/setuptools - -REQUIRES = [ - "urllib3 >= 1.25.3", - "python-dateutil", -] - -setup( - name=NAME, - version=VERSION, - description="Schematic REST API", - author="Support", - author_email="team@openapitools.org", - url="", - keywords=["OpenAPI", "OpenAPI-Generator", "Schematic REST API"], - python_requires=">=3.6", - install_requires=REQUIRES, - packages=find_packages(exclude=["test", "tests"]), - include_package_data=True, - license="Apache 2.0", - long_description="""\ - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - """, -) diff --git a/libs/schematic/api-client-python/test-requirements.txt b/libs/schematic/api-client-python/test-requirements.txt deleted file mode 100644 index bb4f22bb7..000000000 --- a/libs/schematic/api-client-python/test-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pytest-cov>=2.8.1 diff --git a/libs/schematic/api-client-python/test/__init__.py b/libs/schematic/api-client-python/test/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/libs/schematic/api-client-python/test/test_basic_error.py b/libs/schematic/api-client-python/test/test_basic_error.py deleted file mode 100644 index 36b99c570..000000000 --- a/libs/schematic/api-client-python/test/test_basic_error.py +++ /dev/null @@ -1,34 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import sys -import unittest - -import openapi_client -from openapi_client.model.basic_error import BasicError - - -class TestBasicError(unittest.TestCase): - """BasicError unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testBasicError(self): - """Test BasicError""" - # FIXME: construct object with mandatory attributes with example values 
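The should_bypass_proxies and in_ipv4net helpers deleted above decide whether a request should skip the configured proxy: a no_proxy value of "*" bypasses everything, and an IPv4 host is checked against each no_proxy entry treated as an IPv4 network. A small standalone sketch of that check using the same ipaddress module (the addresses below are made up):

import ipaddress


def bypass_for_ipv4(host, no_proxy):
    """Return True if an IPv4 host falls inside any no_proxy CIDR entry."""
    if no_proxy == "*":
        return True
    entries = [e for e in no_proxy.lower().replace(" ", "").split(",") if e]
    try:
        ip = ipaddress.IPv4Address(host)
    except ipaddress.AddressValueError:
        return False  # not an IPv4 literal; hostname matching is handled elsewhere
    for entry in entries:
        try:
            if ip in ipaddress.IPv4Network(entry):
                return True
        except (ipaddress.AddressValueError, ipaddress.NetmaskValueError):
            continue  # skip entries that are not valid IPv4 networks
    return False


print(bypass_for_ipv4("10.1.2.3", "10.0.0.0/8, example.org"))  # True
print(bypass_for_ipv4("8.8.8.8", "10.0.0.0/8"))                # False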
- # model = BasicError() # noqa: E501 - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/libs/schematic/api-client-python/test/test_dataset.py b/libs/schematic/api-client-python/test/test_dataset.py deleted file mode 100644 index 18be2e40d..000000000 --- a/libs/schematic/api-client-python/test/test_dataset.py +++ /dev/null @@ -1,34 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import sys -import unittest - -import openapi_client -from openapi_client.model.dataset import Dataset - - -class TestDataset(unittest.TestCase): - """Dataset unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testDataset(self): - """Test Dataset""" - # FIXME: construct object with mandatory attributes with example values - # model = Dataset() # noqa: E501 - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/libs/schematic/api-client-python/test/test_datasets_page.py b/libs/schematic/api-client-python/test/test_datasets_page.py deleted file mode 100644 index 631ce0dee..000000000 --- a/libs/schematic/api-client-python/test/test_datasets_page.py +++ /dev/null @@ -1,41 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import sys -import unittest - -import openapi_client -from openapi_client.model.dataset import Dataset -from openapi_client.model.datasets_page_all_of import DatasetsPageAllOf -from openapi_client.model.page_metadata import PageMetadata - -globals()["Dataset"] = Dataset -globals()["DatasetsPageAllOf"] = DatasetsPageAllOf -globals()["PageMetadata"] = PageMetadata -from openapi_client.model.datasets_page import DatasetsPage - - -class TestDatasetsPage(unittest.TestCase): - """DatasetsPage unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testDatasetsPage(self): - """Test DatasetsPage""" - # FIXME: construct object with mandatory attributes with example values - # model = DatasetsPage() # noqa: E501 - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/libs/schematic/api-client-python/test/test_datasets_page_all_of.py b/libs/schematic/api-client-python/test/test_datasets_page_all_of.py deleted file mode 100644 index 962b0ed15..000000000 --- a/libs/schematic/api-client-python/test/test_datasets_page_all_of.py +++ /dev/null @@ -1,37 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import sys -import unittest - -import openapi_client -from openapi_client.model.dataset import Dataset - -globals()["Dataset"] = Dataset -from openapi_client.model.datasets_page_all_of import DatasetsPageAllOf - - -class TestDatasetsPageAllOf(unittest.TestCase): - """DatasetsPageAllOf unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testDatasetsPageAllOf(self): - """Test DatasetsPageAllOf""" - # FIXME: construct object with mandatory attributes with example values - # model = DatasetsPageAllOf() # noqa: E501 - pass - - -if __name__ == "__main__": - 
unittest.main() diff --git a/libs/schematic/api-client-python/test/test_page_metadata.py b/libs/schematic/api-client-python/test/test_page_metadata.py deleted file mode 100644 index 8416ac386..000000000 --- a/libs/schematic/api-client-python/test/test_page_metadata.py +++ /dev/null @@ -1,34 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import sys -import unittest - -import openapi_client -from openapi_client.model.page_metadata import PageMetadata - - -class TestPageMetadata(unittest.TestCase): - """PageMetadata unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testPageMetadata(self): - """Test PageMetadata""" - # FIXME: construct object with mandatory attributes with example values - # model = PageMetadata() # noqa: E501 - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/libs/schematic/api-client-python/test/test_storage_api.py b/libs/schematic/api-client-python/test/test_storage_api.py deleted file mode 100644 index 11af01415..000000000 --- a/libs/schematic/api-client-python/test/test_storage_api.py +++ /dev/null @@ -1,34 +0,0 @@ -""" - Schematic REST API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Generated by: https://openapi-generator.tech -""" - -import unittest - -import openapi_client -from openapi_client.api.storage_api import StorageApi # noqa: E501 - - -class TestStorageApi(unittest.TestCase): - """StorageApi unit test stubs""" - - def setUp(self): - self.api = StorageApi() # noqa: E501 - - def tearDown(self): - pass - - def test_list_storage_project_datasets(self): - """Test case for list_storage_project_datasets - - Gets all datasets in folder under a given storage project that the current user has access to. # noqa: E501 - """ - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/libs/schematic/api-client-python/tox.ini b/libs/schematic/api-client-python/tox.ini deleted file mode 100644 index 60d53e739..000000000 --- a/libs/schematic/api-client-python/tox.ini +++ /dev/null @@ -1,9 +0,0 @@ -[tox] -envlist = py3 - -[testenv] -deps=-r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt - -commands= - pytest --cov=schematic_client diff --git a/libs/schematic/api-description/.gitignore b/libs/schematic/api-description/.gitignore deleted file mode 100644 index 684ae5d80..000000000 --- a/libs/schematic/api-description/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!build \ No newline at end of file diff --git a/libs/schematic/api-description/README.md b/libs/schematic/api-description/README.md deleted file mode 100644 index 08efae00a..000000000 --- a/libs/schematic/api-description/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# api-spec - -This library was generated with [Nx](https://nx.dev). - -## Running unit tests - -Run `nx test api-spec` to execute the unit tests. 
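The api.yaml removed next describes the schematic endpoints, starting with /manifest/generate, which takes a data-model URL, one or more data_type components, and an optional output_format. A minimal sketch of calling it with the requests library, assuming a schematic API served under /v1 at a placeholder base URL:

import requests  # third-party HTTP client, assumed available

BASE_URL = "http://localhost:3001/v1"  # placeholder; match your schematic deployment

params = {
    "schema_url": (
        "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
        "develop/tests/data/example.model.jsonld"
    ),
    "title": "Example",
    "data_type": ["Patient", "Biospecimen"],  # repeated query parameter
    "use_annotations": "false",
    "output_format": "google_sheet",
}

response = requests.get(f"{BASE_URL}/manifest/generate", params=params, timeout=60)
response.raise_for_status()
# Per the 200 response description, this returns a Google Sheet link (or an Excel
# file / dataframe for other output_format values).
print(response.json())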
diff --git a/libs/schematic/api-description/build/api.yaml b/libs/schematic/api-description/build/api.yaml deleted file mode 100644 index f6d06965f..000000000 --- a/libs/schematic/api-description/build/api.yaml +++ /dev/null @@ -1,990 +0,0 @@ -openapi: 3.0.0 -info: - title: Schematic REST API - version: 0.1.0 - description: >- - This service exposes core functionalities from schematic as REST API - endpoints - -servers: - - url: /v1 - -paths: - /manifest/generate: - get: - summary: Endpoint to facilitate manifest generation - description: Endpoint to create dynamically create metadata manifest files - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: title - schema: - type: string - description: > - Title of Manifest or Title Prefix, - if making multiple manifests - example: Example - required: false - - in: query - name: data_type - style: form - schema: - type: array - items: - type: string - nullable: true - description: > - Data Model Component(s). - To make all manifests, enter "all manifests". - example: - - Patient - - Biospecimen - required: true - - in: query - name: use_annotations - schema: - type: boolean - default: false - description: To Use Annotations - required: false - - in: query - name: dataset_id - style: form - schema: - type: array - items: - type: string - nullable: true - description: > - Dataset ID. If you want to get an existing manifest, this dataset_id should be the parent ID of the manifest. Can enter multiple dataset_ids, corresponding to order of multiple data_types entered above. Do not enter multiple if calling 'all manifests' for data_type. - required: false - - in: query - name: asset_view - schema: - type: string - nullable: true - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - required: false - - in: query - name: output_format - schema: - type: string - enum: ["excel", "google_sheet", "dataframe (only if getting existing manifests)"] - description: If "excel" gets selected, this approach would avoid sending metadata to Google sheet APIs; if "google_sheet" gets selected, this would return a Google sheet URL. This parameter could potentially override sheet_url parameter. - required: false - operationId: api.routes.get_manifest_route - responses: - "200": - description: Googlesheet link created OR an excel file gets returned OR pandas dataframe gets returned - content: - application/vnd.ms-excel: - schema: - type: string - format: binary - application/json: - schema: - type: string - tags: - - Manifest Operations - /manifest/download: - get: - summary: Endpoint to download an existing manifest - description: Endpoint to download an existing manifest - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. 
master_fileview in config.yml) - example: syn28559058 - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: true - description: this dataset_id should be the parent ID of the manifest. - example: syn28268700 - required: true - - in: query - name: as_json - schema: - type: boolean - default: false - description: if True return the manifest in JSON format - required: false - - in: query - name: new_manifest_name - schema: - type: string - nullable: true - description: Fill in if you want to change the filename of the downloaded manifest. - required: false - operationId: api.routes.download_manifest - responses: - "200": - description: A manifest gets downloaded and local file path of the manifest gets returned. - content: - text/csv: - schema: - type: string - tags: - - Manifest Operations - /model/validate: - post: - summary: Endpoint to facilitate manifest validation - description: Endpoint to validate metadata manifest files - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - # file_name will be the field name in - # this multipart request - file_name: - description: Upload a json or a csv file. - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: json_str - required: false - schema: - type: string - nullable: false - description: A JSON object - example: '[{ - "Patient ID": 123, - "Sex": "Female", - "Year of Birth": "", - "Diagnosis": "Healthy", - "Component": "Patient", - "Cancer Type": "Breast", - "Family History": "Breast, Lung", - }]' - - operationId: api.routes.validate_manifest_route - responses: - "200": - description: Manifest Validated - content: - application/json: - schema: - type: array - items: - type: array - items: - anyOf: - - type: integer - - type: string - - type: array - items: - type: string - tags: - - Model Operations - /model/submit: - post: - summary: Endpoint to facilitate manifest submission - description: Endpoint to submit annotated manifest files - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - file_name: - description: Upload a json or a csv file. - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: true - description: Dataset SynID - required: true - - in: query - name: manifest_record_type - schema: - type: string - enum: [ "table", "entity", "both"] - description: Manifest storage type. - example: 'table' - - in: query - name: restrict_rules - schema: - type: boolean - default: false - description: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. 
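The /model/validate operation above accepts the manifest either as an uploaded file (multipart field file_name) or as a json_str query parameter, alongside schema_url and data_type, and returns validation errors as nested arrays. A minimal sketch of validating a local CSV against the example data model, with placeholder paths and base URL:

import requests  # assumed available

BASE_URL = "http://localhost:3001/v1"  # placeholder
SCHEMA_URL = (
    "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/"
    "develop/tests/data/example.model.jsonld"
)

# "Patient_manifest.csv" is a hypothetical local manifest file.
with open("Patient_manifest.csv", "rb") as fh:
    response = requests.post(
        f"{BASE_URL}/model/validate",
        params={"schema_url": SCHEMA_URL, "data_type": "Patient"},
        files={"file_name": fh},  # multipart field name from the spec
        timeout=120,
    )
response.raise_for_status()
errors = response.json()  # a (possibly empty) array of validation errors
print(errors)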
- required: true - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn28559058 - required: true - - in: query - name: json_str - required: false - schema: - type: string - nullable: false - description: A JSON object - example: '[{ - "Patient ID": 123, - "Sex": "Female", - "Year of Birth": "", - "Diagnosis": "Healthy", - "Component": "Patient", - "Cancer Type": "Breast", - "Family History": "Breast, Lung", - }]' - operationId: api.routes.submit_manifest_route - responses: - "200": - description: Manifest ID (e.g. Synapse ID if your asset management platform is Synapse) - content: - application/json: - schema: - type: string - "500": - description: Check schematic log - tags: - - Model Operations - /model/component-requirements: - get: - summary: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it. - description: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it. Useful to construct requirement dependencies not only between specific attributes but also between categories/components of attributes; it can be utilized to track metadata completion progress across multiple categories of attributes. - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: source_component - schema: - type: string - description: an attribute label indicating the source component. (i.e. Patient, Biospecimen, ScRNA-seqLevel1, ScRNA-seqLevel2) - example: Biospecimen - required: true - - in: query - name: as_graph - schema: - type: boolean - default: false - description: if False return component requirements as a list; if True return component requirements as a dependency graph (i.e. a DAG) - required: true - operationId: api.routes.get_component_requirements - responses: - "200": - description: A list of required components associated with the source component. - content: - application/json: - schema: - type: array - example: ["MolecularTest","Therapy","Diagnosis","FollowUp","Exposure","FamilyHistory","Demographics","Patient","BreastCancerTier3"] - tags: - - Manifest Operations - /manifest/populate: - post: - summary: Create a Google sheet link based on an existing manifest. - description: Create a Google sheet link based on an existing manifest. 
- requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - csv_file: - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: title - schema: - type: string - description: Title of Manifest - example: Example - required: false - - in: query - name: return_excel - schema: - type: boolean - nullable: true - description: If true, this would return an Excel spreadsheet.(This approach would avoid sending metadata to Google sheet APIs) - required: false - operationId: api.routes.populate_manifest_route - responses: - "200": - description: Googlesheet link created - content: - application/json: - schema: - type: string - "500": - description: Check schematic log - tags: - - Manifest Operations - /get/datatype/manifest: - get: - summary: Get datatype of attributes in manifest - description: Get datatype of attributes in manifest - operationId: api.routes.get_manifest_datatype - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: manifest_id - schema: - type: string - nullable: false - description: Manifest ID - example: syn27600110 - required: true - responses: - "200": - description: A list of json - "500": - description: Check schematic log. - tags: - - Manifest Operations - /storage/projects: - get: - summary: Get all storage projects the current user has access to - description: Gets all storage projects the current user has access to, within the scope of the 'storageFileview' attribute. - operationId: api.routes.get_storage_projects - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - responses: - "200": - description: A list of tuples - "500": - description: Check log - tags: - - Synapse Storage - /storage/project/datasets: - get: - summary: Gets all datasets in folder under a given storage project that the current user has access to. - description: Gets all datasets in folder under a given storage project that the current user has access to. - operationId: api.routes.get_storage_projects_datasets - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. 
For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: project_id - schema: - type: string - nullable: false - description: synapse ID of a storage project. - example: syn26251192 - required: true - responses: - "200": - description: A list of tuples - "500": - description: Check log - tags: - - Synapse Storage - /storage/dataset/files: - get: - summary: Get all files in a given dataset folder - description: Get all files in a given dataset folder - operationId: api.routes.get_files_storage_dataset - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: false - description: synapse ID of a storage dataset. - example: syn23643250 - required: true - - in: query - name: file_names - schema: - type: array - items: - type: string - nullable: true - description: a list of files with particular names (i.e. Sample_A.txt). If you leave it empty, it will return all dataset files under the dataset ID. - required: false - - in: query - name: full_path - schema: - type: boolean - nullable: false - description: if True return the full path as part of this filename; otherwise return just base filename - required: true - responses: - "200": - description: A list of tuples - "500": - description: Check schematic log - tags: - - Synapse Storage - /storage/assets/tables: - get: - summary: Retrieve asset view table as a dataframe. - description: Retrieve asset view table as a dataframe. - operationId: api.routes.get_asset_view_table - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: return_type - schema: - type: string - enum: ["json", "csv"] - description: Type of return - example: 'json' - required: true - responses: - "200": - description: csv file path or json - "500": - description: Check schematic log. - tags: - - Synapse Storage - /storage/project/manifests: - get: - summary: Gets all metadata manifest files across all datasets in a specified project. - description: Gets all metadata manifest files across all datasets in a specified project. - operationId: api.routes.get_project_manifests - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: project_id - schema: - type: string - nullable: false - description: Project ID - example: syn30988314 - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: ID of view listing all project data assets. 
For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - responses: - "200": - description: A list of tuples(json). - content: - application/json: - schema: - type: array - example: [ - [ - [datasetId, dataName], - [manifestId, manifestName], - [componentSchemaLabel, componentSchemaLabel] - ] - ] - "500": - description: Check schematic log. - tags: - - Synapse Storage - /schemas/get/schema: - get: - summary: Return schema as a pickle file - description: Return schema as a pickle file - operationId: api.routes.get_schema_pickle - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - responses: - "200": - description: A pickle file gets downloaded and local file path of the pickle file gets returned. - content: - text/plain: - schema: - type: string - "500": - description: Check schematic log. - tags: - - Schema Operation - - /explorer/find_class_specific_properties: - get: - summary: Find properties specifically associated with a given class - description: Find properties specifically associated with a given class - operationId: api.routes.find_class_specific_properties - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: schema_class - schema: - type: string - nullable: false - description: schema class - example: MolecularEntity - required: true - responses: - "200": - description: A list of properties of a given class. - "500": - description: Check schematic log. - tags: - - Schema Operation - /schemas/get/graph_by_edge_type: - get: - summary: Get a subgraph containing all edges of a given type (aka relationship) - description: Get a subgraph containing all edges of a given type (aka relationship) - operationId: api.routes.get_subgraph_by_edge_type - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: relationship - schema: - type: string - nullable: false - description: Relationship (i.e. parentOf, requiresDependency, rangeValue, domainValue) - example: requiresDependency - required: true - responses: - "200": - description: A list of tuples. - content: - application/json: - schema: - type: array - example: [ - [ - [Patient, PatientID], - [Patient,Sex], - [Patient, YearofBirth] - ] - ] - "500": - description: Check schematic log. 
- tags: - - Schema Operation - /schemas/is_node_required: - get: - summary: Check if a node is required or not - description: Check if a node is required or not - operationId: api.routes.get_if_node_required - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: node_display_name - schema: - type: string - nullable: false - description: Display label of a node - example: FamilyHistory - required: true - responses: - "200": - description: return a boolean - "500": - description: Check schematic log. - tags: - - Schema Operation - - - - - /explorer/get_node_dependencies: - get: - summary: Get the immediate dependencies that are related to a given source node - description: Get the immediate dependencies that are related to a given source node - operationId: api.routes.get_node_dependencies - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: source_node - schema: - type: string - nullable: false - description: The node whose dependencies are needed - example: Patient - required: true - - in: query - name: return_display_names - schema: - type: boolean - nullable: true - description: Return display names or not - required: false - example: true - - in: query - name: return_schema_ordered - schema: - type: boolean - nullable: true - description: Return schema ordered or not - required: false - example: true - responses: - "200": - description: List of nodes that are dependent on the source node. - "500": - description: Check schematic log. - tags: - - Schema Operation - - /explorer/get_property_label_from_display_name: - get: - summary: Converts a given display name string into a proper property label string - description: Converts a given display name string into a proper property label string - operationId: api.routes.get_property_label_from_display_name - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: display_name - schema: - type: string - nullable: false - description: The display name to be converted - example: MolecularEntity - required: true - - in: query - name: strict_camel_case - schema: - type: boolean - nullable: false - description: If true the more strict way of - converting to camel case is used. - responses: - "200": - description: The property label of the display name. - "500": - description: Check schematic log. - tags: - - Schema Operation - - /explorer/get_node_range: - get: - summary: Get all the valid values that are associated with a node label. - description: Get all the valid values that are associated with a node label. - operationId: api.routes.get_node_range - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: node_label - schema: - type: string - nullable: false - description: Node / term for which you need to retrieve the range. 
- example: FamilyHistory - required: true - - in: query - name: return_display_names - schema: - type: boolean - description: If true returns the display names of the nodes. - required: false - responses: - "200": - description: A list of nodes. - "500": - description: Check schematic log. - tags: - - Schema Operation - /visualize/tangled_tree/layers: - get: - summary: Get layers of tangled tree. - description: >- - Get tangled tree node layers to display for a given data model and figure type - operationId: api.routes.get_viz_tangled_tree_layers - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: figure_type - schema: - type: string - enum: ["component", "dependency"] - description: Figure type to generate. - example: 'component' - required: true - responses: - "200": - description: Returns a dataframe as a JSON String. - content: - text/json: - schema: - type: string - tags: - - Visualization Operations - /visualize/tangled_tree/text: - get: - summary: Get text to display on tangled tree. - description: >- - Get tangled tree plain or higlighted text to display for a given data model, text formatting and figure type - operationId: api.routes.get_viz_tangled_tree_text - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: figure_type - schema: - type: string - enum: ["component", "dependency"] - description: Figure type to generate. - example: 'component' - required: true - - in: query - name: text_format - schema: - type: string - enum: ["plain", "highlighted"] - description: Text formatting type. - example: 'plain' - required: true - responses: - "200": - description: Returns a dataframe as a JSON String. - content: - text/csv: - schema: - type: string - tags: - - Visualization Operations - /visualize/attributes: - get: - summary: Get an attributes table for a data model, as a CSV (JSON String) - description: >- - Get all the attributes associated with a data model formatted as a - dataframe (stored as a JSON String) for use in Observable visualization. - operationId: api.routes.get_viz_attributes_explorer - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - responses: - "200": - description: Returns a CSV as a JSON String. - content: - text/csv: - schema: - type: string - tags: - - Visualization Operations diff --git a/libs/schematic/api-description/build/openapi.yaml b/libs/schematic/api-description/build/openapi.yaml deleted file mode 100644 index 36e3de8c3..000000000 --- a/libs/schematic/api-description/build/openapi.yaml +++ /dev/null @@ -1,1801 +0,0 @@ -openapi: 3.0.3 -info: - title: Schematic REST API - version: 0.1.0 - license: - name: Apache 2.0 - url: https://github.com/Sage-Bionetworks/sage-monorepo - contact: - name: Support - url: https://github.com/Sage-Bionetworks/sage-monorepo - x-logo: - url: https://Sage-Bionetworks.github.io/rocc-schemas/logo.png -servers: - - url: http://localhost/api/v1 -tags: - - name: Storage - description: Operations about storages. 
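The openapi.yaml paths that follow define bearer-authenticated storage endpoints keyed by asset type, for example /assetTypes/{assetType}/projects/{projectId}/datasetMetadataPage with assetViewId, pageNumber and pageMaxItems query parameters. The exact parameter names and the allowed assetType values live in the components section, which is not shown here, so the names below are inferred from the $ref identifiers and the IDs and token are placeholders. A minimal sketch of a paged call:

import requests  # assumed available

BASE_URL = "http://localhost/api/v1"  # from the servers entry above
TOKEN = "<personal-access-token>"     # placeholder bearer token

response = requests.get(
    # "synapse" is an assumed assetType value; syn26251192 / syn23643253 reuse the
    # example project and asset view IDs from the API description.
    f"{BASE_URL}/assetTypes/synapse/projects/syn26251192/datasetMetadataPage",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={"assetViewId": "syn23643253", "pageNumber": 1, "pageMaxItems": 100},
    timeout=60,
)
response.raise_for_status()
page = response.json()  # shaped like the DatasetMetadataPage schema
print(page)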
-paths: - /schematicVersion: - get: - tags: - - Versions - summary: Gets the version of the schematic library currently used by the API - description: Gets the version of the schematic library currently used by the API - operationId: getSchematicVersion - responses: - '200': - description: Success - content: - application/json: - schema: - type: string - example: v21.1.1 - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataArray: - parameters: - - $ref: '#/components/parameters/projectId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all dataset metadata in folder under a given storage project that the current user has access to. - description: Gets all dataset meatdata in folder under a given storage project that the current user has access to. - operationId: getProjectDatasetMetadataArray - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/DatasetMetadataArray' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataPage: - parameters: - - $ref: '#/components/parameters/projectId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets a page of dataset metadata in folder under a given storage project that the current user has access to. - description: Gets a page of dataset meatdata in folder under a given storage project that the current user has access to. - operationId: getProjectDatasetMetadataPage - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/DatasetMetadataPage' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataArray: - parameters: - - $ref: '#/components/parameters/projectId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all manifests in a project folder that users have access to - description: Gets all manifests in a project folder that the current user has access to. 
- operationId: getProjectManifestMetadataArray - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestMetadataArray' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataPage: - parameters: - - $ref: '#/components/parameters/projectId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all manifests in a project folder that users have access to - description: Gets all manifests in a project folder that the current user has access to. - operationId: getProjectManifestMetadataPage - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestMetadataPage' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/assetViews/{assetViewId}/json: - parameters: - - $ref: '#/components/parameters/assetViewId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets the asset view table in json form - description: Gets the asset view table in json form - operationId: getAssetViewJson - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/AssetViewJson' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/assetViews/{assetViewId}/csv: - parameters: - - $ref: '#/components/parameters/assetViewId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets the asset view table in csv file form - description: Gets the asset view table in csv file form - operationId: getAssetViewCsv - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/datasets/{datasetId}/manifestJson: - parameters: - - $ref: '#/components/parameters/assetType' - - $ref: '#/components/parameters/datasetId' - get: - tags: - - Storage - summary: Gets the manifest in json form - description: Gets the manifest in json form - operationId: getDatasetManifestJson - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - security: - - bearerAuth: [] - responses: - '200': - description: Success - 
content: - application/json: - schema: - $ref: '#/components/schemas/ManifestJson' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/datasets/{datasetId}/manifestCsv: - parameters: - - $ref: '#/components/parameters/assetType' - - $ref: '#/components/parameters/datasetId' - get: - tags: - - Storage - summary: Gets the manifest in csv form - description: Gets the manifest in csv form - operationId: getDatasetManifestCsv - parameters: - - $ref: '#/components/parameters/assetViewIdQuery' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataArray: - parameters: - - $ref: '#/components/parameters/assetViewId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all storage projects the current user has access to. - description: Gets all storage projects the current user has access to. - operationId: getProjectMetadataArray - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ProjectMetadataArray' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataPage: - parameters: - - $ref: '#/components/parameters/assetViewId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all storage projects the current user has access to. - description: Gets all storage projects the current user has access to. - operationId: getProjectMetadataPage - parameters: - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ProjectMetadataPage' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataArray: - parameters: - - $ref: '#/components/parameters/datasetId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all files associated with a dataset. - description: Gets all files associated with a dataset. 
- operationId: getDatasetFileMetadataArray - parameters: - - $ref: '#/components/parameters/fileNames' - - $ref: '#/components/parameters/useFullFilePath' - - $ref: '#/components/parameters/assetViewIdQuery' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/FileMetadataArray' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataPage: - parameters: - - $ref: '#/components/parameters/datasetId' - - $ref: '#/components/parameters/assetType' - get: - tags: - - Storage - summary: Gets all files associated with a dataset. - description: Gets all files associated with a dataset. - operationId: getDatasetFileMetadataPage - parameters: - - $ref: '#/components/parameters/fileNames' - - $ref: '#/components/parameters/useFullFilePath' - - $ref: '#/components/parameters/assetViewIdQuery' - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/FileMetadataPage' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/manifests/{manifestId}/json: - parameters: - - $ref: '#/components/parameters/assetType' - - $ref: '#/components/parameters/manifestId' - get: - tags: - - Storage - summary: Gets the manifest in json form - description: Gets the manifest in json form - operationId: getManifestJson - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestJson' - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /assetTypes/{assetType}/manifests/{manifestId}/csv: - parameters: - - $ref: '#/components/parameters/assetType' - - $ref: '#/components/parameters/manifestId' - get: - tags: - - Storage - summary: Gets the manifest in csv form - description: Gets the manifest in csv form - operationId: getManifestCsv - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: '#/components/responses/BadRequest' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - $ref: '#/components/responses/Unauthorized' - '404': - $ref: '#/components/responses/NotFound' - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeLabel}/dependencyArray: - parameters: - - $ref: '#/components/parameters/nodeLabel' - get: - tags: - - Schema - summary: Gets the immediate dependencies that are related to the given source node - description: Gets the immediate dependencies that are related to the given source node - operationId: getNodeDependencyArray - parameters: - - $ref: '#/components/parameters/schemaUrl' - - 
$ref: '#/components/parameters/returnDisplayNames' - - $ref: '#/components/parameters/returnOrderedBySchema' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/NodeArray' - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeLabel}/dependencyPage: - parameters: - - $ref: '#/components/parameters/nodeLabel' - get: - tags: - - Schema - summary: Gets the immediate dependencies that are related to the given source node - description: Gets the immediate dependencies that are related to the given source node - operationId: getNodeDependencyPage - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/returnDisplayNames' - - $ref: '#/components/parameters/returnOrderedBySchema' - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/NodePage' - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeDisplay}/isRequired: - parameters: - - $ref: '#/components/parameters/nodeDisplay' - get: - tags: - - Schema - summary: Gets whether or not the node is required in the schema - description: Gets whether or not the node is required in the schema - operationId: getNodeIsRequired - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - description: Whether or not the node is required in the schema - type: boolean - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeLabel}/nodeProperties: - parameters: - - $ref: '#/components/parameters/nodeLabel' - get: - tags: - - Schema - summary: Gets properties associated with a given node - description: Gets properties associated with a given node - operationId: getNodeProperties - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/NodePropertyArray' - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeDisplay}/propertyLabel: - parameters: - - $ref: '#/components/parameters/nodeDisplay' - get: - tags: - - Schema - summary: Gets the property label of the node - description: Gets the property label of the node - operationId: getPropertyLabel - parameters: - - $ref: '#/components/parameters/useStrictCamelCase' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/PropertyLabel' - '500': - $ref: '#/components/responses/InternalServerError' - /nodes/{nodeDisplay}/validationRules: - parameters: - - $ref: '#/components/parameters/nodeDisplay' - get: - tags: - - Schema - summary: Gets the validation rules, along with the arguments for each given rule associated with a given node - description: Gets the validation rules, along with the arguments for each given rule associated with a given node - operationId: getNodeValidationRules - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: 
'#/components/schemas/ValidationRuleArray' - '500': - $ref: '#/components/responses/InternalServerError' - /components/{componentLabel}/: - parameters: - - $ref: '#/components/parameters/componentLabel' - get: - tags: - - Schema - summary: Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). - description: Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). - operationId: getComponent - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/includeIndex' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - description: The component as a json string - type: string - '500': - $ref: '#/components/responses/InternalServerError' - /components/{componentLabel}/requirementsArray: - parameters: - - $ref: '#/components/parameters/componentLabel' - get: - tags: - - Schema - summary: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in array form. - description: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in array form. - operationId: getComponentRequirementsArray - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ComponentRequirementArray' - '500': - $ref: '#/components/responses/InternalServerError' - /components/{componentLabel}/requirementsGraph: - parameters: - - $ref: '#/components/parameters/componentLabel' - get: - tags: - - Schema - summary: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in graph form. - description: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in graph form. - operationId: getComponentRequirementsGraph - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ComponentRequirementGraph' - '500': - $ref: '#/components/responses/InternalServerError' - /schemaAttributes: - get: - tags: - - Schema - summary: Get all the attributes associated with a data model formatted as a dataframe (stored as a JSON String). - description: Get all the attributes associated with a data model formatted as a dataframe (stored as a JSON String). 
- operationId: getSchemaAttributes - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - description: The schema as a json string - type: string - '500': - $ref: '#/components/responses/InternalServerError' - /connectedNodePairArray: - get: - tags: - - Schema - summary: Gets an array of connected node pairs - description: Gets a array of connected node pairs - operationId: getConnectedNodePairArray - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/relationshipType' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectedNodePairArray' - '500': - $ref: '#/components/responses/InternalServerError' - /connectedNodePairPage: - get: - tags: - - Schema - summary: Gets a page of connected node pairs - description: Gets a page of connected node pairs - operationId: getConnectedNodePairPage - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/relationshipType' - - $ref: '#/components/parameters/pageNumber' - - $ref: '#/components/parameters/pageMaxItems' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectedNodePairPage' - '500': - $ref: '#/components/responses/InternalServerError' - /validateManifestJson: - post: - tags: - - ManifestValidation - summary: Validates a manifest in json form - description: Validates a manifest in json form - requestBody: - description: A manifest in json form - content: - text/plain: - schema: - type: string - operationId: validateManifestJson - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/componentLabelQuery' - - $ref: '#/components/parameters/restrictRules' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestValidationResult' - '500': - $ref: '#/components/responses/InternalServerError' - /validateManifestCsv: - post: - tags: - - ManifestValidation - summary: Validates a manifest in csv form - description: Validates a manifest in csv form - requestBody: - description: .csv file - content: - application/csv: - schema: - type: string - format: binary - required: true - operationId: validateManifestCsv - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/componentLabelQuery' - - $ref: '#/components/parameters/restrictRules' - - $ref: '#/components/parameters/displayLabelType' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestValidationResult' - '500': - $ref: '#/components/responses/InternalServerError' - /submitManifestJson: - post: - tags: - - ManifestValidation - summary: Validates a manifest in json form, then submits it - description: Validates a manifest in json form, then submits it in csv form - requestBody: - description: A manifest in json form - content: - text/plain: - schema: - type: string - operationId: submitManifestJson - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/component' - - $ref: 
'#/components/parameters/restrictRules' - - $ref: '#/components/parameters/datasetIdQuery' - - $ref: '#/components/parameters/storageMethod' - - $ref: '#/components/parameters/hideBlanks' - - $ref: '#/components/parameters/assetViewIdQuery' - - $ref: '#/components/parameters/tableManipulationMethod' - - $ref: '#/components/parameters/displayLabelType' - - $ref: '#/components/parameters/annotationKeyStyle' - - $ref: '#/components/parameters/tableColumnNameStyle' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestId' - '500': - $ref: '#/components/responses/InternalServerError' - /submitManifestCsv: - post: - tags: - - ManifestValidation - summary: Validates manifest in csv form, then submits it - description: Validates manifest in csv form, then submits it - requestBody: - description: .csv file - content: - application/csv: - schema: - type: string - format: binary - required: true - operationId: submitManifestCsv - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/component' - - $ref: '#/components/parameters/restrictRules' - - $ref: '#/components/parameters/datasetIdQuery' - - $ref: '#/components/parameters/storageMethod' - - $ref: '#/components/parameters/hideBlanks' - - $ref: '#/components/parameters/assetViewIdQuery' - - $ref: '#/components/parameters/tableManipulationMethod' - - $ref: '#/components/parameters/displayLabelType' - - $ref: '#/components/parameters/annotationKeyStyle' - - $ref: '#/components/parameters/tableColumnNameStyle' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/ManifestId' - '500': - $ref: '#/components/responses/InternalServerError' - /tangledTreeLayers: - get: - tags: - - TangledTree - summary: Get tangled tree node layers to display for a given data model and figure type - description: Get tangled tree node layers to display for a given data model and figure type - operationId: getTangledTreeLayers - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/figureType' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/TangledTreeLayers' - '500': - $ref: '#/components/responses/InternalServerError' - /tangledTreeText: - get: - tags: - - TangledTree - summary: Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type - description: Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type - operationId: getTangledTreeText - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/figureType' - - $ref: '#/components/parameters/textFormat' - - $ref: '#/components/parameters/displayLabelType' - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/TangledTreeText' - '500': - $ref: '#/components/responses/InternalServerError' - /generateGoogleSheetManifests: - get: - tags: - - ManifestGeneration - summary: Generates a list of google sheet links - description: Generates a list of google sheet links - operationId: generateGoogleSheetManifests - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/addAnnotations' - - $ref: 
'#/components/parameters/manifestTitle' - - $ref: '#/components/parameters/displayLabelType' - - $ref: '#/components/parameters/useStrictValidation' - - name: datasetIdArray - in: query - description: An array of dataset ids - required: false - schema: - $ref: '#/components/schemas/DatasetIdArray' - - name: dataTypeArray - in: query - description: An array of data types - required: false - schema: - $ref: '#/components/schemas/DataTypeArray' - - name: assetViewId - in: query - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: false - schema: - $ref: '#/components/schemas/AssetViewId' - - name: generateAllManifests - in: query - description: If true, a manifest for all components will be generated, datasetIds will be ignored. If false, manifests for each id in datasetIds will be generated. - required: false - schema: - type: boolean - default: false - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: '#/components/schemas/GoogleSheetLinks' - '500': - $ref: '#/components/responses/InternalServerError' - /generateExcelManifest: - get: - tags: - - ManifestGeneration - summary: Generates an excel file - description: Generates an excel file - operationId: generateExcelManifest - parameters: - - $ref: '#/components/parameters/schemaUrl' - - $ref: '#/components/parameters/addAnnotations' - - $ref: '#/components/parameters/manifestTitle' - - $ref: '#/components/parameters/displayLabelType' - - name: datasetId - in: query - description: The ID of a dataset. - required: false - schema: - $ref: '#/components/schemas/DatasetId' - - name: dataType - in: query - description: A data type - required: true - schema: - $ref: '#/components/schemas/DataType' - - name: assetViewId - in: query - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: false - schema: - $ref: '#/components/schemas/AssetViewId' - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/vnd.ms-excel: - schema: - type: string - format: binary - '500': - $ref: '#/components/responses/InternalServerError' -components: - securitySchemes: - bearerAuth: - type: http - scheme: bearer - bearerFormat: JWT - schemas: - BasicError: - type: object - description: Problem details (tools.ietf.org/html/rfc7807) - properties: - title: - type: string - description: A human readable documentation for the problem type - status: - type: integer - description: The HTTP status code - detail: - type: string - description: A human readable explanation specific to this occurrence of the problem - type: - type: string - description: An absolute URI that identifies the problem type - required: - - title - - status - x-java-class-annotations: - - '@lombok.Builder' - ProjectId: - description: A project ID. - type: string - example: syn26251192 - AssetType: - description: Type of asset store, such as Synapse - type: string - example: synapse - enum: - - synapse - AssetViewId: - description: An asset view ID. - type: string - example: syn23643253 - DatasetMetadata: - type: object - description: The metadata of a dataset. - properties: - name: - type: string - description: The name of the dataset. - example: Example dataset - id: - type: string - description: The ID of the dataset. 
- example: Syn1 - required: - - name - - id - x-java-class-annotations: - - '@lombok.Builder' - DatasetMetadataArray: - type: object - description: An array of dataset metadata. - properties: - datasets: - description: An array of dataset meatdata. - type: array - items: - $ref: '#/components/schemas/DatasetMetadata' - PageMetadata: - type: object - description: The metadata of a page. - properties: - number: - description: The page number. - type: integer - format: int32 - example: 99 - size: - description: The number of items in a single page. - type: integer - format: int32 - example: 99 - totalElements: - description: Total number of elements in the result set. - type: integer - format: int64 - example: 99 - totalPages: - description: Total number of pages in the result set. - type: integer - format: int32 - example: 99 - hasNext: - description: Returns if there is a next page. - type: boolean - example: true - hasPrevious: - description: Returns if there is a previous page. - type: boolean - example: true - required: - - number - - size - - totalElements - - totalPages - - hasNext - - hasPrevious - DatasetMetadataPage: - type: object - description: A page of dataset metadata. - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - datasets: - description: An array of dataset meatdata. - type: array - items: - $ref: '#/components/schemas/DatasetMetadata' - required: - - datasets - x-java-class-annotations: - - '@lombok.Builder' - ManifestMetadata: - type: object - description: The metadata for a manifest file - properties: - name: - type: string - description: The name of the manifest file. - example: synapse_storage_manifest.csv - id: - type: string - description: The id of the manifest file. - example: syn1 - datasetName: - type: string - description: The name of the dataset the manifest belongs to. - example: dataset_X - datasetId: - type: string - description: The id of the dataset the manifest belongs to. - example: syn2 - componentName: - type: string - description: The name of the component the manifest is of. - example: patient - required: - - name - - id - ManifestMetadataArray: - type: object - description: An array of manifest metadata - properties: - manifests: - description: A list of manifest metadata - type: array - items: - $ref: '#/components/schemas/ManifestMetadata' - ManifestMetadataPage: - type: object - description: A page of manifest metadata - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - manifests: - description: A list of manifest metadata - type: array - items: - $ref: '#/components/schemas/ManifestMetadata' - required: - - manifests - x-java-class-annotations: - - '@lombok.Builder' - AssetViewJson: - description: An asset view in json format - type: object - DatasetId: - description: A dataset ID. - type: string - example: syn23643250 - ManifestJson: - description: A manifest in json format - type: object - ProjectMetadata: - type: object - description: The metadata for a project - properties: - name: - type: string - description: The name of the project. - example: Example project - id: - type: string - description: The ID of the project. - example: Syn1 - required: - - name - - id - x-java-class-annotations: - - '@lombok.Builder' - ProjectMetadataArray: - type: object - description: An array of project metadata. - properties: - projects: - description: An array of project metadata. 
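The *MetadataPage routes above wrap their results in PageMetadata (number, size, totalElements, hasNext, ...), which a client can use to walk every page. A minimal sketch follows, assuming the server URL listed in the spec, a placeholder bearer token, and the example Synapse IDs from the schema definitions; requests is used purely for illustration.

# Minimal sketch: page through all dataset metadata of a storage project
# using pageNumber/pageMaxItems and the hasNext flag from PageMetadata.
# TOKEN is a placeholder; PROJECT_ID and ASSET_VIEW_ID reuse the example
# IDs from the spec above.
import requests

BASE_URL = "http://localhost/api/v1"   # server listed in the spec
TOKEN = "<synapse-access-token>"       # placeholder bearer token
PROJECT_ID = "syn26251192"             # example ProjectId from the spec
ASSET_VIEW_ID = "syn23643253"          # example AssetViewId from the spec

page_number = 1
datasets = []
while True:
    response = requests.get(
        f"{BASE_URL}/assetTypes/synapse/projects/{PROJECT_ID}/datasetMetadataPage",
        headers={"Authorization": f"Bearer {TOKEN}"},
        params={
            "assetViewId": ASSET_VIEW_ID,
            "pageNumber": page_number,
            "pageMaxItems": 100,
        },
        timeout=60,
    )
    response.raise_for_status()
    page = response.json()
    datasets.extend(page["datasets"])
    if not page["hasNext"]:
        break
    page_number += 1

print(f"Collected {len(datasets)} datasets across all pages")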
- type: array - items: - $ref: '#/components/schemas/ProjectMetadata' - ProjectMetadataPage: - type: object - description: A page of project metadata. - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - projects: - description: An array of project metadata. - type: array - items: - $ref: '#/components/schemas/ProjectMetadata' - required: - - projects - x-java-class-annotations: - - '@lombok.Builder' - FileNames: - description: A list of file names. - type: array - items: - type: string - UseFullFilePath: - description: Whether or not to return the full path of output, or just the basename. - type: boolean - default: false - FileMetadata: - type: object - description: The metadata for a file - properties: - name: - type: string - description: The name of the file. - example: file.txt - id: - type: string - description: The ID of the file. - example: Syn1 - required: - - name - - id - x-java-class-annotations: - - '@lombok.Builder' - FileMetadataArray: - type: object - description: A list of file metadata. - properties: - files: - description: A list of file metadata. - type: array - items: - $ref: '#/components/schemas/FileMetadata' - FileMetadataPage: - type: object - description: A page of file metadata. - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - files: - description: A list of file metadata. - type: array - items: - $ref: '#/components/schemas/FileMetadata' - required: - - files - x-java-class-annotations: - - '@lombok.Builder' - ManifestId: - description: A manifest ID. - type: string - example: syn51078535 - NodeLabel: - description: The label of a node in a schema - type: string - example: MolecularEntity - SchemaUrl: - description: The URL of a schema in jsonld form - type: string - example: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - ReturnDisplayNames: - description: Whether or not to return the display names of the node otherwise the label - type: boolean - default: true - ReturnOrderedBySchema: - description: Whether or not to order the components by their order in the schema, otherwise random - type: boolean - default: true - Node: - type: object - description: A node of a schema. - properties: - name: - type: string - description: The name of the node. - example: Patient - required: - - name - x-java-class-annotations: - - '@lombok.Builder' - NodeArray: - type: object - description: An array of nodes. - properties: - nodes: - description: An array of nodes. - type: array - items: - $ref: '#/components/schemas/Node' - NodePage: - type: object - description: A page of nodes. - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - nodes: - description: An array of nodes. - type: array - items: - $ref: '#/components/schemas/Node' - required: - - nodes - x-java-class-annotations: - - '@lombok.Builder' - NodeDisplay: - description: The display name of a node in a schema - type: string - example: MolecularEntity - NodePropertyArray: - type: object - description: An array of node properties. - properties: - node_properties: - description: An array of node properties. 
- type: array - items: - type: string - UseStrictCamelCase: - description: Whether or not to use the more strict way of converting to camel case - type: boolean - default: true - PropertyLabel: - description: The property label of a node in a schema - type: string - example: MolecularEntity - ValidationRule: - type: object - description: A validation rule. - properties: - name: - type: string - description: The name of the rule, along with the arguments for the given rule. - example: list strict - required: - - name - x-java-class-annotations: - - '@lombok.Builder' - ValidationRuleArray: - type: object - description: An array of validation rules. - properties: - validation_rules: - description: An array of validation rules. - type: array - items: - $ref: '#/components/schemas/ValidationRule' - ComponentLabel: - description: The label of a component in a schema - type: string - example: Patient - ComponentRequirementArray: - type: object - description: An array of components - properties: - componentRequirementsList: - type: array - items: - type: string - ComponentRequirementSubgraph: - type: object - description: A pair of components - properties: - component1: - type: string - description: The display name of the first component in the graph - example: component1 - component2: - type: string - description: The display name of the second component in the graph - example: component2 - required: - - component1 - - component2 - x-java-class-annotations: - - '@lombok.Builder' - ComponentRequirementGraph: - type: object - description: A graph of components - properties: - componentRequirementsGraph: - type: array - items: - $ref: '#/components/schemas/ComponentRequirementSubgraph' - RelationshipType: - description: A type of schema relationship - type: string - example: requiresDependency - ConnectedNodePair: - type: object - description: A pair of conncted nodes - properties: - node1: - type: string - description: The disaplay name of the first node. - example: Node1 - node2: - type: string - description: The display name of the second node. - example: Node2 - required: - - node1 - - node2 - x-java-class-annotations: - - '@lombok.Builder' - ConnectedNodePairArray: - type: object - description: An array of conncted node pairs - properties: - connectedNodes: - description: An array of conncted node pairs. - type: array - items: - $ref: '#/components/schemas/ConnectedNodePair' - ConnectedNodePairPage: - type: object - description: A page of conncted node pairs - allOf: - - $ref: '#/components/schemas/PageMetadata' - - type: object - properties: - connectedNodes: - description: An array of conncted node pairs. - type: array - items: - $ref: '#/components/schemas/ConnectedNodePair' - required: - - connectedNodes - x-java-class-annotations: - - '@lombok.Builder' - RestrictRules: - description: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. 
- type: boolean - default: false - ManifestValidationResult: - type: object - description: The results of manifest validation - properties: - errors: - description: Any errors from validation - type: array - items: - type: string - warnings: - description: Any warnings from validation - type: array - items: - type: string - TangledTreeLayers: - description: Tangled tree node layers to display for a given data model - type: string - TangledTreeText: - description: Tangled tree plain or higlighted text to display for a given data model - type: object - DatasetIdArray: - type: array - description: An array of dataset ids - items: - $ref: '#/components/schemas/DatasetId' - DataType: - description: A data type - type: string - example: Patient - DataTypeArray: - description: An array of data types - type: array - items: - $ref: '#/components/schemas/DataType' - GoogleSheetLinks: - type: object - description: An array of google sheet links - properties: - links: - type: array - items: - type: string - responses: - InternalServerError: - description: The request cannot be fulfilled due to an unexpected server error - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - BadRequest: - description: Invalid request - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - Unauthorized: - description: Unauthorized - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - NotFound: - description: The specified resource was not found - content: - application/problem+json: - schema: - $ref: '#/components/schemas/BasicError' - parameters: - projectId: - name: projectId - in: path - description: The Synapse ID of a storage project. - required: true - schema: - $ref: '#/components/schemas/ProjectId' - assetType: - name: assetType - in: path - description: Type of asset, such as Synapse - required: true - schema: - $ref: '#/components/schemas/AssetType' - assetViewIdQuery: - name: assetViewId - in: query - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - pageNumber: - name: pageNumber - in: query - description: The page number to get for a paginated query - required: false - schema: - type: integer - default: 1 - minimum: 1 - pageMaxItems: - name: pageMaxItems - in: query - description: The maximum number of items per page (up to 100,000) for paginated endpoints - required: false - schema: - type: integer - default: 100000 - minimum: 1 - assetViewId: - name: assetViewId - in: path - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: true - schema: - $ref: '#/components/schemas/AssetViewId' - datasetId: - name: datasetId - in: path - description: The ID of a dataset. - required: true - schema: - $ref: '#/components/schemas/DatasetId' - fileNames: - name: fileNames - description: A list of file names used to filter the output. - in: query - required: false - schema: - $ref: '#/components/schemas/FileNames' - useFullFilePath: - name: useFullFilePath - in: query - description: Whether or not to return the full path of output, or just the basename. 
- required: false - schema: - $ref: '#/components/schemas/UseFullFilePath' - manifestId: - name: manifestId - in: path - description: ID of a manifest - required: true - schema: - $ref: '#/components/schemas/ManifestId' - nodeLabel: - name: nodeLabel - in: path - description: The label of the source node in a schema to get the dependencies of - required: true - schema: - $ref: '#/components/schemas/NodeLabel' - schemaUrl: - name: schemaUrl - in: query - description: The URL of a schema in jsonld or csv form - required: true - schema: - $ref: '#/components/schemas/SchemaUrl' - returnDisplayNames: - name: returnDisplayNames - in: query - description: Whether or not to return the display names of the component, otherwise the label - required: false - schema: - $ref: '#/components/schemas/ReturnDisplayNames' - returnOrderedBySchema: - name: returnOrderedBySchema - in: query - description: Whether or not to order the components by their order in the schema, otherwise random - required: false - schema: - $ref: '#/components/schemas/ReturnOrderedBySchema' - displayLabelType: - name: displayLabelType - in: query - description: The type of label to display - required: false - schema: - type: string - enum: - - class_label - - display_label - default: class_label - nodeDisplay: - name: nodeDisplay - in: path - description: The display name of the node in a schema - required: true - schema: - $ref: '#/components/schemas/NodeDisplay' - useStrictCamelCase: - name: useStrictCamelCase - in: query - description: Whether or not to use the more strict way of converting to camel case - schema: - $ref: '#/components/schemas/UseStrictCamelCase' - componentLabel: - name: componentLabel - in: path - description: The label of a component in a schema - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - includeIndex: - name: includeIndex - in: query - description: Whether to include the indexes of the dataframe in the returned JSON string. - required: false - schema: - type: boolean - default: false - relationshipType: - name: relationshipType - in: query - description: Type of relationship in a schema, such as requiresDependency - required: true - schema: - $ref: '#/components/schemas/RelationshipType' - componentLabelQuery: - name: componentLabel - in: query - description: The label of a component in a schema - required: true - schema: - $ref: '#/components/schemas/ComponentLabel' - restrictRules: - name: restrictRules - in: query - description: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. - required: false - schema: - $ref: '#/components/schemas/RestrictRules' - component: - name: component - in: query - description: A component in a schema, either the dsplay label or schema label - required: true - schema: - type: string - datasetIdQuery: - name: datasetId - in: query - description: The ID of a dataset. - required: true - schema: - $ref: '#/components/schemas/DatasetId' - storageMethod: - name: storageMethod - in: query - description: file_and_entities will store the manifest as a csv and create Synapse files for each row in the manifest. table_and_file will store the manifest as a table and a csv on Synapse. file_only will store the manifest as a csv only on Synapse. table_file_and_entities will perform the options file_with_entites and table in combination. 
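The storageMethod parameter described above selects how a submitted manifest is persisted in the asset store. The sketch below posts a manifest CSV to the submitManifestCsv route defined earlier with an explicit storageMethod; the host, token, and file name are placeholders, while the parameter names and example Synapse IDs come from the spec.

# Minimal sketch: submit a manifest CSV with an explicit storageMethod.
# BASE_URL, TOKEN, and the local file name are placeholders; datasetId and
# assetViewId reuse the example IDs from the spec above.
import requests

BASE_URL = "http://localhost/api/v1"   # server listed in the spec
TOKEN = "<synapse-access-token>"       # placeholder bearer token

with open("synapse_storage_manifest.csv", "rb") as manifest:
    response = requests.post(
        f"{BASE_URL}/submitManifestCsv",
        headers={
            "Authorization": f"Bearer {TOKEN}",
            "Content-Type": "application/csv",
        },
        params={
            "schemaUrl": (
                "https://raw.githubusercontent.com/Sage-Bionetworks/"
                "schematic/develop/tests/data/example.model.jsonld"
            ),
            "component": "Patient",
            "datasetId": "syn23643250",     # example DatasetId from the spec
            "assetViewId": "syn23643253",   # example AssetViewId from the spec
            "storageMethod": "table_file_and_entities",
        },
        data=manifest,                       # request body is the .csv file
        timeout=300,
    )
response.raise_for_status()
print(response.json())  # ManifestId of the stored manifest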
- required: false - schema: - type: string - enum: - - file_and_entities - - table_and_file - - file_only - - table_file_and_entities - default: table_file_and_entities - hideBlanks: - name: hideBlanks - in: query - description: If true, annotations with blank values will be hidden from a dataset's annotation list in Synaspe. If false, annotations with blank values will be displayed. - required: false - schema: - type: boolean - default: false - tableManipulationMethod: - name: tableManipulationMethod - in: query - description: replace will remove the rows and columns from the existing table and store the new rows and columns, preserving the name and synID. upsert will add the new rows to the table and preserve the exisitng rows and columns in the existing table. - required: false - schema: - type: string - enum: - - replace - - upsert - default: replace - annotationKeyStyle: - name: annotationKeyStyle - in: query - description: The labeling style for annotation keys. - required: false - schema: - type: string - enum: - - class_label - - display_label - default: class_label - tableColumnNameStyle: - name: tableColumnNameStyle - in: query - description: The labeling syle for table column names. - required: false - schema: - type: string - enum: - - class_label - - display_label - - display_name - default: class_label - figureType: - name: figureType - in: query - description: Figure type to generate. - required: false - schema: - type: string - enum: - - component - - dependency - default: component - textFormat: - name: textFormat - in: query - description: Text formatting type. - required: false - schema: - type: string - enum: - - plain - - highlighted - default: plain - addAnnotations: - name: addAnnotations - in: query - description: If true, annotations are added to the manifest - required: false - schema: - type: boolean - default: false - manifestTitle: - name: manifestTitle - in: query - description: If making one manifest, the title of the manifest. If making multiple manifests, the prefix of the title of the manifests. - required: false - schema: - type: string - useStrictValidation: - name: useStrictValidation - in: query - description: If true, users are blocked from entering incorrect values. If false, users will get a warning when using incorrect values. 
- required: false - schema: - type: boolean - default: true diff --git a/libs/schematic/api-description/project.json b/libs/schematic/api-description/project.json deleted file mode 100644 index 71c9c9450..000000000 --- a/libs/schematic/api-description/project.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "schematic-api-description", - "$schema": "../../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "libs/schematic/api-description/src", - "projectType": "library", - "targets": { - "build": { - "executor": "nx:run-commands", - "options": { - "command": "redocly bundle --output build/openapi.yaml src/openapi.yaml", - "cwd": "{projectRoot}" - } - }, - "lint": { - "executor": "nx:run-commands", - "options": { - "command": "redocly lint --config tools/redocly/config.yaml {projectName}" - }, - "dependsOn": ["build"] - } - }, - "tags": ["language:openapi"], - "implicitDependencies": [] -} diff --git a/libs/schematic/api-description/src/components/README.md b/libs/schematic/api-description/src/components/README.md deleted file mode 100644 index 7be6c7d69..000000000 --- a/libs/schematic/api-description/src/components/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Reusable components - -- You can create the following folders here: - - `schemas` - reusable [Schema Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject) - - `responses` - reusable [Response Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#responseObject) - - `parameters` - reusable [Parameter Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#parameterObject) - - `examples` - reusable [Example Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#exampleObject) - - `headers` - reusable [Header Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#headerObject) - - `requestBodies` - reusable [Request Body Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#requestBodyObject) - - `links` - reusable [Link Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#linkObject) - - `callbacks` - reusable [Callback Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#callbackObject) - - `securitySchemes` - reusable [Security Scheme Objects](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#securitySchemeObject) -- Filename of files inside the folders represent component name, e.g. 
`Customer.yaml` diff --git a/libs/schematic/api-description/src/components/headers/ExpiresAfter.yaml b/libs/schematic/api-description/src/components/headers/ExpiresAfter.yaml deleted file mode 100644 index 0cbe9a510..000000000 --- a/libs/schematic/api-description/src/components/headers/ExpiresAfter.yaml +++ /dev/null @@ -1,4 +0,0 @@ -description: date in UTC when token expires -schema: - type: string - format: date-time diff --git a/libs/schematic/api-description/src/components/parameters/path/assetType.yaml b/libs/schematic/api-description/src/components/parameters/path/assetType.yaml deleted file mode 100644 index eb87ca15b..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/assetType.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: assetType -in: path -description: Type of asset, such as Synapse -required: true -schema: - $ref: ../../schemas/AssetType.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/assetViewId.yaml b/libs/schematic/api-description/src/components/parameters/path/assetViewId.yaml deleted file mode 100644 index dc525f322..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/assetViewId.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: assetViewId -in: path -description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project -required: true -schema: - $ref: ../../schemas/AssetViewId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/componentDisplay.yaml b/libs/schematic/api-description/src/components/parameters/path/componentDisplay.yaml deleted file mode 100644 index f847eec40..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/componentDisplay.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: componentDisplay -in: path -description: The display name of a component in a schema -required: true -schema: - $ref: ../../schemas/ComponentDisplay.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/componentLabel.yaml b/libs/schematic/api-description/src/components/parameters/path/componentLabel.yaml deleted file mode 100644 index 0747031be..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/componentLabel.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: componentLabel -in: path -description: The label of a component in a schema -required: true -schema: - $ref: ../../schemas/ComponentLabel.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/datasetId.yaml b/libs/schematic/api-description/src/components/parameters/path/datasetId.yaml deleted file mode 100644 index 6000e98a4..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/datasetId.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: datasetId -in: path -description: The ID of a dataset. 
-required: true -schema: - $ref: ../../schemas/DatasetId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/manifestId.yaml b/libs/schematic/api-description/src/components/parameters/path/manifestId.yaml deleted file mode 100644 index 90dafa824..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/manifestId.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: manifestId -in: path -description: ID of a manifest -required: true -schema: - $ref: ../../schemas/ManifestId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/nodeDisplay.yaml b/libs/schematic/api-description/src/components/parameters/path/nodeDisplay.yaml deleted file mode 100644 index 14ba92b61..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/nodeDisplay.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: nodeDisplay -in: path -description: The display name of the node in a schema -required: true -schema: - $ref: ../../schemas/NodeDisplay.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/nodeLabel.yaml b/libs/schematic/api-description/src/components/parameters/path/nodeLabel.yaml deleted file mode 100644 index 1237ae6e9..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/nodeLabel.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: nodeLabel -in: path -description: The label of the source node in a schema to get the dependencies of -required: true -schema: - $ref: ../../schemas/NodeLabel.yaml diff --git a/libs/schematic/api-description/src/components/parameters/path/projectId.yaml b/libs/schematic/api-description/src/components/parameters/path/projectId.yaml deleted file mode 100644 index 016e417c4..000000000 --- a/libs/schematic/api-description/src/components/parameters/path/projectId.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: projectId -in: path -description: The Synapse ID of a storage project. -required: true -schema: - $ref: ../../schemas/ProjectId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/addAnnotations.yaml b/libs/schematic/api-description/src/components/parameters/query/addAnnotations.yaml deleted file mode 100644 index 843b896be..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/addAnnotations.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: addAnnotations -in: query -description: If true, annotations are added to the manifest -required: false -schema: - type: boolean - default: false diff --git a/libs/schematic/api-description/src/components/parameters/query/annotationKeyStyle.yaml b/libs/schematic/api-description/src/components/parameters/query/annotationKeyStyle.yaml deleted file mode 100644 index f79a1030a..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/annotationKeyStyle.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: annotationKeyStyle -in: query -description: 'The labeling style for annotation keys.' -required: false -schema: - type: string - enum: ['class_label', 'display_label'] - default: 'class_label' diff --git a/libs/schematic/api-description/src/components/parameters/query/assetViewIdQuery.yaml b/libs/schematic/api-description/src/components/parameters/query/assetViewIdQuery.yaml deleted file mode 100644 index 7438d46b3..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/assetViewIdQuery.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: assetViewId -in: query -description: ID of view listing all project data assets. E.g. 
for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project -required: true -schema: - $ref: ../../schemas/AssetViewId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/component.yaml b/libs/schematic/api-description/src/components/parameters/query/component.yaml deleted file mode 100644 index 84f7b13b3..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/component.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: component -in: query -description: A component in a schema, either the dsplay label or schema label -required: true -schema: - type: string diff --git a/libs/schematic/api-description/src/components/parameters/query/componentLabelQuery.yaml b/libs/schematic/api-description/src/components/parameters/query/componentLabelQuery.yaml deleted file mode 100644 index 13a91b22f..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/componentLabelQuery.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: componentLabel -in: query -description: The label of a component in a schema -required: true -schema: - $ref: ../../schemas/ComponentLabel.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/dataType.yaml b/libs/schematic/api-description/src/components/parameters/query/dataType.yaml deleted file mode 100644 index 5bd38b8cc..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/dataType.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: dataType -in: query -description: A data type -required: false -schema: - $ref: ../../schemas/DataType.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/dataTypeArray.yaml b/libs/schematic/api-description/src/components/parameters/query/dataTypeArray.yaml deleted file mode 100644 index e5c17ab96..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/dataTypeArray.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: dataTypeArray -in: query -description: An array of data types -required: false -schema: - $ref: ../../schemas/DataTypeArray.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/datasetIdArray.yaml b/libs/schematic/api-description/src/components/parameters/query/datasetIdArray.yaml deleted file mode 100644 index f52e3b681..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/datasetIdArray.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: datasetIdArray -in: query -description: An array of dataset ids -required: false -schema: - $ref: ../../schemas/DatasetIdArray.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/datasetIdQuery.yaml b/libs/schematic/api-description/src/components/parameters/query/datasetIdQuery.yaml deleted file mode 100644 index 62a5c6374..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/datasetIdQuery.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: datasetId -in: query -description: The ID of a dataset. 
-required: true -schema: - $ref: ../../schemas/DatasetId.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/displayLabelType.yaml b/libs/schematic/api-description/src/components/parameters/query/displayLabelType.yaml deleted file mode 100644 index 23c8ac179..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/displayLabelType.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: displayLabelType -in: query -description: 'The type of label to display' -required: false -schema: - type: string - enum: ['class_label', 'display_label'] - default: 'class_label' diff --git a/libs/schematic/api-description/src/components/parameters/query/figureType.yaml b/libs/schematic/api-description/src/components/parameters/query/figureType.yaml deleted file mode 100644 index 12496bef1..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/figureType.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: figureType -in: query -description: Figure type to generate. -required: false -schema: - type: string - enum: [component, dependency] - default: component diff --git a/libs/schematic/api-description/src/components/parameters/query/fileNames.yaml b/libs/schematic/api-description/src/components/parameters/query/fileNames.yaml deleted file mode 100644 index 83b6ea7f6..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/fileNames.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: fileNames -description: A list of file names used to filter the output. -in: query -required: false -schema: - $ref: ../../schemas/FileNames.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/hideBlanks.yaml b/libs/schematic/api-description/src/components/parameters/query/hideBlanks.yaml deleted file mode 100644 index 345f2dc18..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/hideBlanks.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: hideBlanks -in: query -description: - If true, annotations with blank values will be hidden from a dataset's annotation list in Synaspe. - If false, annotations with blank values will be displayed. -required: false -schema: - type: boolean - default: false diff --git a/libs/schematic/api-description/src/components/parameters/query/includeIndex.yaml b/libs/schematic/api-description/src/components/parameters/query/includeIndex.yaml deleted file mode 100644 index f33483485..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/includeIndex.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: includeIndex -in: query -description: Whether to include the indexes of the dataframe in the returned JSON string. 
-required: false -schema: - type: boolean - default: false diff --git a/libs/schematic/api-description/src/components/parameters/query/manifestJson.yaml b/libs/schematic/api-description/src/components/parameters/query/manifestJson.yaml deleted file mode 100644 index 61c957949..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/manifestJson.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: manifestJson -in: query -description: A manifest in json string form -required: true -schema: - $ref: ../../schemas/ManifestJson.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/manifestTitle.yaml b/libs/schematic/api-description/src/components/parameters/query/manifestTitle.yaml deleted file mode 100644 index be3c89b22..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/manifestTitle.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: manifestTitle -in: query -description: If making one manifest, the title of the manifest. If making multiple manifests, the prefix of the title of the manifests. -required: false -schema: - type: string diff --git a/libs/schematic/api-description/src/components/parameters/query/nodeLabelArray.yaml b/libs/schematic/api-description/src/components/parameters/query/nodeLabelArray.yaml deleted file mode 100644 index f08678a55..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/nodeLabelArray.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: nodeLabelArray -in: query -description: An array of nodel labels -required: false -schema: - $ref: ../../schemas/NodeLabelArray.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/pageMaxItems.yaml b/libs/schematic/api-description/src/components/parameters/query/pageMaxItems.yaml deleted file mode 100644 index c49d0c6b0..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/pageMaxItems.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: pageMaxItems -in: query -description: The maximum number of items per page (up to 100,000) for paginated endpoints -required: false -schema: - type: integer - default: 100000 - minimum: 1 diff --git a/libs/schematic/api-description/src/components/parameters/query/pageNumber.yaml b/libs/schematic/api-description/src/components/parameters/query/pageNumber.yaml deleted file mode 100644 index 9cd8f8e4f..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/pageNumber.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: pageNumber -in: query -description: The page number to get for a paginated query -required: false -schema: - type: integer - default: 1 - minimum: 1 diff --git a/libs/schematic/api-description/src/components/parameters/query/relationshipType.yaml b/libs/schematic/api-description/src/components/parameters/query/relationshipType.yaml deleted file mode 100644 index 5d3d9b488..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/relationshipType.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: relationshipType -in: query -description: Type of relationship in a schema, such as requiresDependency -required: true -schema: - $ref: ../../schemas/RelationshipType.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/restrictRules.yaml b/libs/schematic/api-description/src/components/parameters/query/restrictRules.yaml deleted file mode 100644 index b4bdd58a2..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/restrictRules.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: restrictRules -in: query 
-description: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. -required: false -schema: - $ref: ../../schemas/RestrictRules.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/returnDisplayNames.yaml b/libs/schematic/api-description/src/components/parameters/query/returnDisplayNames.yaml deleted file mode 100644 index 73b24f905..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/returnDisplayNames.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: returnDisplayNames -in: query -description: Whether or not to return the display names of the component, otherwise the label -required: false -schema: - $ref: ../../schemas/ReturnDisplayNames.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/returnOrderedBySchema.yaml b/libs/schematic/api-description/src/components/parameters/query/returnOrderedBySchema.yaml deleted file mode 100644 index c18394e72..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/returnOrderedBySchema.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: returnOrderedBySchema -in: query -description: Whether or not to order the components by their order in the schema, otherwise random -required: false -schema: - $ref: ../../schemas/ReturnOrderedBySchema.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/schemaUrl.yaml b/libs/schematic/api-description/src/components/parameters/query/schemaUrl.yaml deleted file mode 100644 index d19438d3b..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/schemaUrl.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: schemaUrl -in: query -description: The URL of a schema in jsonld or csv form -required: true -schema: - $ref: ../../schemas/SchemaUrl.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/storageMethod.yaml b/libs/schematic/api-description/src/components/parameters/query/storageMethod.yaml deleted file mode 100644 index 933b1f130..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/storageMethod.yaml +++ /dev/null @@ -1,12 +0,0 @@ -name: storageMethod -in: query -description: - file_and_entities will store the manifest as a csv and create Synapse files for each row in the manifest. - table_and_file will store the manifest as a table and a csv on Synapse. - file_only will store the manifest as a csv only on Synapse. - table_file_and_entities will perform the options file_and_entities and table in combination. -required: false -schema: - type: string - enum: [file_and_entities, table_and_file, file_only, table_file_and_entities] - default: table_file_and_entities diff --git a/libs/schematic/api-description/src/components/parameters/query/tableColumnNameStyle.yaml b/libs/schematic/api-description/src/components/parameters/query/tableColumnNameStyle.yaml deleted file mode 100644 index 235fffdca..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/tableColumnNameStyle.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: tableColumnNameStyle -in: query -description: 'The labeling style for table column names.'
-required: false -schema: - type: string - enum: ['class_label', 'display_label', 'display_name'] - default: 'class_label' diff --git a/libs/schematic/api-description/src/components/parameters/query/tableManipulationMethod.yaml b/libs/schematic/api-description/src/components/parameters/query/tableManipulationMethod.yaml deleted file mode 100644 index 6890eb1ee..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/tableManipulationMethod.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: tableManipulationMethod -in: query -description: - replace will remove the rows and columns from the existing table and store the new rows and columns, preserving the name and synID. - upsert will add the new rows to the table and preserve the existing rows and columns in the existing table. -required: false -schema: - type: string - enum: [replace, upsert] - default: replace diff --git a/libs/schematic/api-description/src/components/parameters/query/textFormat.yaml b/libs/schematic/api-description/src/components/parameters/query/textFormat.yaml deleted file mode 100644 index a2cdfa32e..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/textFormat.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: textFormat -in: query -description: Text formatting type. -required: false -schema: - type: string - enum: [plain, highlighted] - default: plain diff --git a/libs/schematic/api-description/src/components/parameters/query/useFullFilePath.yaml b/libs/schematic/api-description/src/components/parameters/query/useFullFilePath.yaml deleted file mode 100644 index e6507fc7a..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/useFullFilePath.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: useFullFilePath -in: query -description: Whether or not to return the full path of output, or just the basename. -required: false -schema: - $ref: ../../schemas/UseFullFilePath.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/useStrictCamelCase.yaml b/libs/schematic/api-description/src/components/parameters/query/useStrictCamelCase.yaml deleted file mode 100644 index e5592640a..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/useStrictCamelCase.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name: useStrictCamelCase -in: query -description: Whether or not to use the more strict way of converting to camel case -schema: - $ref: ../../schemas/UseStrictCamelCase.yaml diff --git a/libs/schematic/api-description/src/components/parameters/query/useStrictValidation.yaml b/libs/schematic/api-description/src/components/parameters/query/useStrictValidation.yaml deleted file mode 100644 index b403090dc..000000000 --- a/libs/schematic/api-description/src/components/parameters/query/useStrictValidation.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: useStrictValidation -in: query -description: If true, users are blocked from entering incorrect values. - If false, users will get a warning when using incorrect values.
-required: false -schema: - type: boolean - default: true diff --git a/libs/schematic/api-description/src/components/responses/BadRequest.yaml b/libs/schematic/api-description/src/components/responses/BadRequest.yaml deleted file mode 100644 index f1a3d5dd2..000000000 --- a/libs/schematic/api-description/src/components/responses/BadRequest.yaml +++ /dev/null @@ -1,5 +0,0 @@ -description: Invalid request -content: - application/problem+json: - schema: - $ref: ../schemas/BasicError.yaml diff --git a/libs/schematic/api-description/src/components/responses/Conflict.yaml b/libs/schematic/api-description/src/components/responses/Conflict.yaml deleted file mode 100644 index d725a306c..000000000 --- a/libs/schematic/api-description/src/components/responses/Conflict.yaml +++ /dev/null @@ -1,5 +0,0 @@ -description: The request conflicts with current state of the target resource -content: - application/problem+json: - schema: - $ref: ../schemas/BasicError.yaml diff --git a/libs/schematic/api-description/src/components/responses/InternalServerError.yaml b/libs/schematic/api-description/src/components/responses/InternalServerError.yaml deleted file mode 100644 index b660ed8eb..000000000 --- a/libs/schematic/api-description/src/components/responses/InternalServerError.yaml +++ /dev/null @@ -1,5 +0,0 @@ -description: The request cannot be fulfilled due to an unexpected server error -content: - application/problem+json: - schema: - $ref: ../schemas/BasicError.yaml diff --git a/libs/schematic/api-description/src/components/responses/NotFound.yaml b/libs/schematic/api-description/src/components/responses/NotFound.yaml deleted file mode 100644 index 695a10631..000000000 --- a/libs/schematic/api-description/src/components/responses/NotFound.yaml +++ /dev/null @@ -1,5 +0,0 @@ -description: The specified resource was not found -content: - application/problem+json: - schema: - $ref: ../schemas/BasicError.yaml diff --git a/libs/schematic/api-description/src/components/responses/Unauthorized.yaml b/libs/schematic/api-description/src/components/responses/Unauthorized.yaml deleted file mode 100644 index b7955fdcd..000000000 --- a/libs/schematic/api-description/src/components/responses/Unauthorized.yaml +++ /dev/null @@ -1,5 +0,0 @@ -description: Unauthorized -content: - application/problem+json: - schema: - $ref: ../schemas/BasicError.yaml diff --git a/libs/schematic/api-description/src/components/schemas/AssetType.yaml b/libs/schematic/api-description/src/components/schemas/AssetType.yaml deleted file mode 100644 index acb70e2c3..000000000 --- a/libs/schematic/api-description/src/components/schemas/AssetType.yaml +++ /dev/null @@ -1,4 +0,0 @@ -description: 'Type of asset store, such as Synapse' -type: string -example: synapse -enum: [synapse] diff --git a/libs/schematic/api-description/src/components/schemas/AssetViewId.yaml b/libs/schematic/api-description/src/components/schemas/AssetViewId.yaml deleted file mode 100644 index f7ecca99a..000000000 --- a/libs/schematic/api-description/src/components/schemas/AssetViewId.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: An asset view ID. 
-type: string -example: syn23643253 diff --git a/libs/schematic/api-description/src/components/schemas/AssetViewJson.yaml b/libs/schematic/api-description/src/components/schemas/AssetViewJson.yaml deleted file mode 100644 index 7a304b106..000000000 --- a/libs/schematic/api-description/src/components/schemas/AssetViewJson.yaml +++ /dev/null @@ -1,2 +0,0 @@ -description: An asset view in json format -type: object diff --git a/libs/schematic/api-description/src/components/schemas/BasicError.yaml b/libs/schematic/api-description/src/components/schemas/BasicError.yaml deleted file mode 100644 index ee1641031..000000000 --- a/libs/schematic/api-description/src/components/schemas/BasicError.yaml +++ /dev/null @@ -1,21 +0,0 @@ -type: object -description: Problem details (tools.ietf.org/html/rfc7807) -properties: - title: - type: string - description: A human readable documentation for the problem type - status: - type: integer - description: The HTTP status code - detail: - type: string - description: A human readable explanation specific to this occurrence of - the problem - type: - type: string - description: An absolute URI that identifies the problem type -required: - - title - - status -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/Component.yaml b/libs/schematic/api-description/src/components/schemas/Component.yaml deleted file mode 100644 index c0974bc28..000000000 --- a/libs/schematic/api-description/src/components/schemas/Component.yaml +++ /dev/null @@ -1,11 +0,0 @@ -type: object -description: A component of a schema. -properties: - name: - type: string - description: The name of the component. - example: Patient -required: - - name -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ComponentDisplay.yaml b/libs/schematic/api-description/src/components/schemas/ComponentDisplay.yaml deleted file mode 100644 index 2aad3cb2a..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentDisplay.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The display name of a component in a schema -type: string -example: Patient diff --git a/libs/schematic/api-description/src/components/schemas/ComponentLabel.yaml b/libs/schematic/api-description/src/components/schemas/ComponentLabel.yaml deleted file mode 100644 index f64684b56..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentLabel.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The label of a component in a schema -type: string -example: Patient diff --git a/libs/schematic/api-description/src/components/schemas/ComponentRequirementArray.yaml b/libs/schematic/api-description/src/components/schemas/ComponentRequirementArray.yaml deleted file mode 100644 index 1a6ff6436..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentRequirementArray.yaml +++ /dev/null @@ -1,7 +0,0 @@ -type: object -description: An array of components -properties: - componentRequirementsList: - type: array - items: - type: string diff --git a/libs/schematic/api-description/src/components/schemas/ComponentRequirementGraph.yaml b/libs/schematic/api-description/src/components/schemas/ComponentRequirementGraph.yaml deleted file mode 100644 index a528c2d8e..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentRequirementGraph.yaml +++ /dev/null @@ -1,7 +0,0 @@ -type: object -description: A graph of components -properties: - componentRequirementsGraph: - 
type: array - items: - $ref: ComponentRequirementSubgraph.yaml diff --git a/libs/schematic/api-description/src/components/schemas/ComponentRequirementSubgraph.yaml b/libs/schematic/api-description/src/components/schemas/ComponentRequirementSubgraph.yaml deleted file mode 100644 index 6a22a9c1b..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentRequirementSubgraph.yaml +++ /dev/null @@ -1,16 +0,0 @@ -type: object -description: A pair of components -properties: - component1: - type: string - description: The display name of the first component in the graph - example: component1 - component2: - type: string - description: The display name of the second component in the graph - example: component2 -required: - - component1 - - component2 -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ComponentsPage.yaml b/libs/schematic/api-description/src/components/schemas/ComponentsPage.yaml deleted file mode 100644 index e04593c2a..000000000 --- a/libs/schematic/api-description/src/components/schemas/ComponentsPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of components. -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - components: - description: A list of components. - type: array - items: - $ref: Component.yaml - required: - - components -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ConnectedNodePair.yaml b/libs/schematic/api-description/src/components/schemas/ConnectedNodePair.yaml deleted file mode 100644 index 9cca5ee51..000000000 --- a/libs/schematic/api-description/src/components/schemas/ConnectedNodePair.yaml +++ /dev/null @@ -1,16 +0,0 @@ -type: object -description: A pair of connected nodes -properties: - node1: - type: string - description: The display name of the first node. - example: Node1 - node2: - type: string - description: The display name of the second node. - example: Node2 -required: - - node1 - - node2 -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ConnectedNodePairArray.yaml b/libs/schematic/api-description/src/components/schemas/ConnectedNodePairArray.yaml deleted file mode 100644 index cb5f7e48f..000000000 --- a/libs/schematic/api-description/src/components/schemas/ConnectedNodePairArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of connected node pairs -properties: - connectedNodes: - description: An array of connected node pairs. - type: array - items: - $ref: ConnectedNodePair.yaml diff --git a/libs/schematic/api-description/src/components/schemas/ConnectedNodePairPage.yaml b/libs/schematic/api-description/src/components/schemas/ConnectedNodePairPage.yaml deleted file mode 100644 index 145a4abbb..000000000 --- a/libs/schematic/api-description/src/components/schemas/ConnectedNodePairPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of connected node pairs -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - connectedNodes: - description: An array of connected node pairs.
- type: array - items: - $ref: ConnectedNodePair.yaml - required: - connectedNodes -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/DataType.yaml b/libs/schematic/api-description/src/components/schemas/DataType.yaml deleted file mode 100644 index 746e65125..000000000 --- a/libs/schematic/api-description/src/components/schemas/DataType.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: A data type -type: string -example: Patient diff --git a/libs/schematic/api-description/src/components/schemas/DataTypeArray.yaml b/libs/schematic/api-description/src/components/schemas/DataTypeArray.yaml deleted file mode 100644 index 8500bbedf..000000000 --- a/libs/schematic/api-description/src/components/schemas/DataTypeArray.yaml +++ /dev/null @@ -1,4 +0,0 @@ -description: An array of data types -type: array -items: - $ref: DataType.yaml diff --git a/libs/schematic/api-description/src/components/schemas/DatasetId.yaml b/libs/schematic/api-description/src/components/schemas/DatasetId.yaml deleted file mode 100644 index 46b40394f..000000000 --- a/libs/schematic/api-description/src/components/schemas/DatasetId.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: A dataset ID. -type: string -example: syn23643250 diff --git a/libs/schematic/api-description/src/components/schemas/DatasetIdArray.yaml b/libs/schematic/api-description/src/components/schemas/DatasetIdArray.yaml deleted file mode 100644 index 42ad11843..000000000 --- a/libs/schematic/api-description/src/components/schemas/DatasetIdArray.yaml +++ /dev/null @@ -1,4 +0,0 @@ -type: array -description: An array of dataset ids -items: - $ref: DatasetId.yaml diff --git a/libs/schematic/api-description/src/components/schemas/DatasetMetadata.yaml b/libs/schematic/api-description/src/components/schemas/DatasetMetadata.yaml deleted file mode 100644 index 5295d6abf..000000000 --- a/libs/schematic/api-description/src/components/schemas/DatasetMetadata.yaml +++ /dev/null @@ -1,16 +0,0 @@ -type: object -description: The metadata of a dataset. -properties: - name: - type: string - description: The name of the dataset. - example: Example dataset - id: - type: string - description: The ID of the dataset. - example: Syn1 -required: - - name - - id -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/DatasetMetadataArray.yaml b/libs/schematic/api-description/src/components/schemas/DatasetMetadataArray.yaml deleted file mode 100644 index e8054da9f..000000000 --- a/libs/schematic/api-description/src/components/schemas/DatasetMetadataArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of dataset metadata. -properties: - datasets: - description: An array of dataset metadata. - type: array - items: - $ref: DatasetMetadata.yaml diff --git a/libs/schematic/api-description/src/components/schemas/DatasetMetadataPage.yaml b/libs/schematic/api-description/src/components/schemas/DatasetMetadataPage.yaml deleted file mode 100644 index c5f6a7b0d..000000000 --- a/libs/schematic/api-description/src/components/schemas/DatasetMetadataPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of dataset metadata. -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - datasets: - description: An array of dataset metadata.
- type: array - items: - $ref: DatasetMetadata.yaml - required: - - datasets -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/FileMetadata.yaml b/libs/schematic/api-description/src/components/schemas/FileMetadata.yaml deleted file mode 100644 index 7446f4f0f..000000000 --- a/libs/schematic/api-description/src/components/schemas/FileMetadata.yaml +++ /dev/null @@ -1,16 +0,0 @@ -type: object -description: The metadata for a file -properties: - name: - type: string - description: The name of the file. - example: file.txt - id: - type: string - description: The ID of the file. - example: Syn1 -required: - - name - - id -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/FileMetadataArray.yaml b/libs/schematic/api-description/src/components/schemas/FileMetadataArray.yaml deleted file mode 100644 index 2a11cc4e3..000000000 --- a/libs/schematic/api-description/src/components/schemas/FileMetadataArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: A list of file metadata. -properties: - files: - description: A list of file metadata. - type: array - items: - $ref: FileMetadata.yaml diff --git a/libs/schematic/api-description/src/components/schemas/FileMetadataPage.yaml b/libs/schematic/api-description/src/components/schemas/FileMetadataPage.yaml deleted file mode 100644 index a4d2afd42..000000000 --- a/libs/schematic/api-description/src/components/schemas/FileMetadataPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of file metadata. -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - files: - description: A list of file metadata. - type: array - items: - $ref: FileMetadata.yaml - required: - - files -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/FileNames.yaml b/libs/schematic/api-description/src/components/schemas/FileNames.yaml deleted file mode 100644 index 4989b6f1f..000000000 --- a/libs/schematic/api-description/src/components/schemas/FileNames.yaml +++ /dev/null @@ -1,4 +0,0 @@ -description: A list of file names. -type: array -items: - type: string diff --git a/libs/schematic/api-description/src/components/schemas/GoogleSheetLinks.yaml b/libs/schematic/api-description/src/components/schemas/GoogleSheetLinks.yaml deleted file mode 100644 index 8a46c9557..000000000 --- a/libs/schematic/api-description/src/components/schemas/GoogleSheetLinks.yaml +++ /dev/null @@ -1,7 +0,0 @@ -type: object -description: An array of google sheet links -properties: - links: - type: array - items: - type: string diff --git a/libs/schematic/api-description/src/components/schemas/ManifestId.yaml b/libs/schematic/api-description/src/components/schemas/ManifestId.yaml deleted file mode 100644 index 63feb31b9..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestId.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: A manifest ID. 
-type: string -example: syn51078535 diff --git a/libs/schematic/api-description/src/components/schemas/ManifestJson.yaml b/libs/schematic/api-description/src/components/schemas/ManifestJson.yaml deleted file mode 100644 index bd69e7303..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestJson.yaml +++ /dev/null @@ -1,2 +0,0 @@ -description: A manifest in json format -type: object diff --git a/libs/schematic/api-description/src/components/schemas/ManifestMetadata.yaml b/libs/schematic/api-description/src/components/schemas/ManifestMetadata.yaml deleted file mode 100644 index 2f3359f3e..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestMetadata.yaml +++ /dev/null @@ -1,26 +0,0 @@ -type: object -description: The metadata for a manifest file -properties: - name: - type: string - description: The name of the manifest file. - example: synapse_storage_manifest.csv - id: - type: string - description: The id of the manifest file. - example: syn1 - datasetName: - type: string - description: The name of the dataset the manifest belongs to. - example: dataset_X - datasetId: - type: string - description: The id of the dataset the manifest belongs to. - example: syn2 - componentName: - type: string - description: The name of the component the manifest is of. - example: patient -required: - - name - - id diff --git a/libs/schematic/api-description/src/components/schemas/ManifestMetadataArray.yaml b/libs/schematic/api-description/src/components/schemas/ManifestMetadataArray.yaml deleted file mode 100644 index 7e93f7009..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestMetadataArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of manifest metadata -properties: - manifests: - description: A list of manifest metadata - type: array - items: - $ref: ManifestMetadata.yaml diff --git a/libs/schematic/api-description/src/components/schemas/ManifestMetadataPage.yaml b/libs/schematic/api-description/src/components/schemas/ManifestMetadataPage.yaml deleted file mode 100644 index 53cff5188..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestMetadataPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of manifest metadata -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - manifests: - description: A list of manifest metadata - type: array - items: - $ref: ManifestMetadata.yaml - required: - - manifests -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ManifestValidationResult.yaml b/libs/schematic/api-description/src/components/schemas/ManifestValidationResult.yaml deleted file mode 100644 index bcc4e5238..000000000 --- a/libs/schematic/api-description/src/components/schemas/ManifestValidationResult.yaml +++ /dev/null @@ -1,13 +0,0 @@ -type: object -description: The results of manifest validation -properties: - errors: - description: Any errors from validation - type: array - items: - type: string - warnings: - description: Any warnings from validation - type: array - items: - type: string diff --git a/libs/schematic/api-description/src/components/schemas/Node.yaml b/libs/schematic/api-description/src/components/schemas/Node.yaml deleted file mode 100644 index a73788faa..000000000 --- a/libs/schematic/api-description/src/components/schemas/Node.yaml +++ /dev/null @@ -1,11 +0,0 @@ -type: object -description: A node of a schema. 
-properties: - name: - type: string - description: The name of the node. - example: Patient -required: - - name -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/NodeArray.yaml b/libs/schematic/api-description/src/components/schemas/NodeArray.yaml deleted file mode 100644 index 8bb9d140a..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodeArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of nodes. -properties: - nodes: - description: An array of nodes. - type: array - items: - $ref: Node.yaml diff --git a/libs/schematic/api-description/src/components/schemas/NodeDisplay.yaml b/libs/schematic/api-description/src/components/schemas/NodeDisplay.yaml deleted file mode 100644 index 2d14558ff..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodeDisplay.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The display name of a node in a schema -type: string -example: MolecularEntity diff --git a/libs/schematic/api-description/src/components/schemas/NodeLabel.yaml b/libs/schematic/api-description/src/components/schemas/NodeLabel.yaml deleted file mode 100644 index 48ac42d60..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodeLabel.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The label of a node in a schema -type: string -example: MolecularEntity diff --git a/libs/schematic/api-description/src/components/schemas/NodeLabelArray.yaml b/libs/schematic/api-description/src/components/schemas/NodeLabelArray.yaml deleted file mode 100644 index 9c12d8d8a..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodeLabelArray.yaml +++ /dev/null @@ -1,4 +0,0 @@ -description: An array of node labels -type: array -items: - $ref: NodeLabel.yaml diff --git a/libs/schematic/api-description/src/components/schemas/NodePage.yaml b/libs/schematic/api-description/src/components/schemas/NodePage.yaml deleted file mode 100644 index 3b86b371b..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodePage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of nodes. -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - nodes: - description: An array of nodes. - type: array - items: - $ref: Node.yaml - required: - - nodes -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/NodePropertyArray.yaml b/libs/schematic/api-description/src/components/schemas/NodePropertyArray.yaml deleted file mode 100644 index c84ee116b..000000000 --- a/libs/schematic/api-description/src/components/schemas/NodePropertyArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of node properties. -properties: - node_properties: - description: An array of node properties. - type: array - items: - type: string diff --git a/libs/schematic/api-description/src/components/schemas/PageMetadata.yaml b/libs/schematic/api-description/src/components/schemas/PageMetadata.yaml deleted file mode 100644 index 3658436cf..000000000 --- a/libs/schematic/api-description/src/components/schemas/PageMetadata.yaml +++ /dev/null @@ -1,38 +0,0 @@ -type: object -description: The metadata of a page. -properties: - number: - description: The page number. - type: integer - format: int32 - example: 99 - size: - description: The number of items in a single page. 
- type: integer - format: int32 - example: 99 - totalElements: - description: Total number of elements in the result set. - type: integer - format: int64 - example: 99 - totalPages: - description: Total number of pages in the result set. - type: integer - format: int32 - example: 99 - hasNext: - description: Returns if there is a next page. - type: boolean - example: true - hasPrevious: - description: Returns if there is a previous page. - type: boolean - example: true -required: - - number - - size - - totalElements - - totalPages - - hasNext - - hasPrevious diff --git a/libs/schematic/api-description/src/components/schemas/ProjectId.yaml b/libs/schematic/api-description/src/components/schemas/ProjectId.yaml deleted file mode 100644 index ccebc439d..000000000 --- a/libs/schematic/api-description/src/components/schemas/ProjectId.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: A project ID. -type: string -example: syn26251192 diff --git a/libs/schematic/api-description/src/components/schemas/ProjectMetadata.yaml b/libs/schematic/api-description/src/components/schemas/ProjectMetadata.yaml deleted file mode 100644 index 1ae97023d..000000000 --- a/libs/schematic/api-description/src/components/schemas/ProjectMetadata.yaml +++ /dev/null @@ -1,16 +0,0 @@ -type: object -description: The metadata for a project -properties: - name: - type: string - description: The name of the project. - example: Example project - id: - type: string - description: The ID of the project. - example: Syn1 -required: - - name - - id -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ProjectMetadataArray.yaml b/libs/schematic/api-description/src/components/schemas/ProjectMetadataArray.yaml deleted file mode 100644 index 611f317fe..000000000 --- a/libs/schematic/api-description/src/components/schemas/ProjectMetadataArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of project metadata. -properties: - projects: - description: An array of project metadata. - type: array - items: - $ref: ProjectMetadata.yaml diff --git a/libs/schematic/api-description/src/components/schemas/ProjectMetadataPage.yaml b/libs/schematic/api-description/src/components/schemas/ProjectMetadataPage.yaml deleted file mode 100644 index e4377e549..000000000 --- a/libs/schematic/api-description/src/components/schemas/ProjectMetadataPage.yaml +++ /dev/null @@ -1,15 +0,0 @@ -type: object -description: A page of project metadata. -allOf: - - $ref: PageMetadata.yaml - - type: object - properties: - projects: - description: An array of project metadata. 
- type: array - items: - $ref: ProjectMetadata.yaml - required: - - projects -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/PropertyLabel.yaml b/libs/schematic/api-description/src/components/schemas/PropertyLabel.yaml deleted file mode 100644 index 16ecbb06e..000000000 --- a/libs/schematic/api-description/src/components/schemas/PropertyLabel.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The property label of a node in a schema -type: string -example: MolecularEntity diff --git a/libs/schematic/api-description/src/components/schemas/RelationshipType.yaml b/libs/schematic/api-description/src/components/schemas/RelationshipType.yaml deleted file mode 100644 index 8869735e4..000000000 --- a/libs/schematic/api-description/src/components/schemas/RelationshipType.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: A type of schema relationship -type: string -example: requiresDependency diff --git a/libs/schematic/api-description/src/components/schemas/RestrictRules.yaml b/libs/schematic/api-description/src/components/schemas/RestrictRules.yaml deleted file mode 100644 index 647834089..000000000 --- a/libs/schematic/api-description/src/components/schemas/RestrictRules.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: If True, validation suite will only run with in-house validation rule. If False, the Great Expectations suite will be utilized and all rules will be available. -type: boolean -default: false diff --git a/libs/schematic/api-description/src/components/schemas/ReturnDisplayNames.yaml b/libs/schematic/api-description/src/components/schemas/ReturnDisplayNames.yaml deleted file mode 100644 index 783c8a13f..000000000 --- a/libs/schematic/api-description/src/components/schemas/ReturnDisplayNames.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: Whether or not to return the display names of the node otherwise the label -type: boolean -default: true diff --git a/libs/schematic/api-description/src/components/schemas/ReturnOrderedBySchema.yaml b/libs/schematic/api-description/src/components/schemas/ReturnOrderedBySchema.yaml deleted file mode 100644 index 6439f6a06..000000000 --- a/libs/schematic/api-description/src/components/schemas/ReturnOrderedBySchema.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: Whether or not to order the components by their order in the schema, otherwise random -type: boolean -default: true diff --git a/libs/schematic/api-description/src/components/schemas/SchemaUrl.yaml b/libs/schematic/api-description/src/components/schemas/SchemaUrl.yaml deleted file mode 100644 index 80f1c9dd4..000000000 --- a/libs/schematic/api-description/src/components/schemas/SchemaUrl.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: The URL of a schema in jsonld form -type: string -example: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld diff --git a/libs/schematic/api-description/src/components/schemas/TangledTreeLayers.yaml b/libs/schematic/api-description/src/components/schemas/TangledTreeLayers.yaml deleted file mode 100644 index 31d3661c3..000000000 --- a/libs/schematic/api-description/src/components/schemas/TangledTreeLayers.yaml +++ /dev/null @@ -1,2 +0,0 @@ -description: Tangled tree node layers to display for a given data model -type: string diff --git a/libs/schematic/api-description/src/components/schemas/TangledTreeText.yaml b/libs/schematic/api-description/src/components/schemas/TangledTreeText.yaml deleted file mode 100644 index 81d1e4f70..000000000 --- 
a/libs/schematic/api-description/src/components/schemas/TangledTreeText.yaml +++ /dev/null @@ -1,2 +0,0 @@ -description: Tangled tree plain or higlighted text to display for a given data model -type: object diff --git a/libs/schematic/api-description/src/components/schemas/UseFullFilePath.yaml b/libs/schematic/api-description/src/components/schemas/UseFullFilePath.yaml deleted file mode 100644 index 6065ec354..000000000 --- a/libs/schematic/api-description/src/components/schemas/UseFullFilePath.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: Whether or not to return the full path of output, or just the basename. -type: boolean -default: false diff --git a/libs/schematic/api-description/src/components/schemas/UseStrictCamelCase.yaml b/libs/schematic/api-description/src/components/schemas/UseStrictCamelCase.yaml deleted file mode 100644 index 4a5c13915..000000000 --- a/libs/schematic/api-description/src/components/schemas/UseStrictCamelCase.yaml +++ /dev/null @@ -1,3 +0,0 @@ -description: Whether or not to use the more strict way of converting to camel case -type: boolean -default: true diff --git a/libs/schematic/api-description/src/components/schemas/ValidationRule.yaml b/libs/schematic/api-description/src/components/schemas/ValidationRule.yaml deleted file mode 100644 index d52acb26a..000000000 --- a/libs/schematic/api-description/src/components/schemas/ValidationRule.yaml +++ /dev/null @@ -1,11 +0,0 @@ -type: object -description: A validation rule. -properties: - name: - type: string - description: The name of the rule, along with the arguments for the given rule. - example: list strict -required: - - name -x-java-class-annotations: - - '@lombok.Builder' diff --git a/libs/schematic/api-description/src/components/schemas/ValidationRuleArray.yaml b/libs/schematic/api-description/src/components/schemas/ValidationRuleArray.yaml deleted file mode 100644 index 6758b42c6..000000000 --- a/libs/schematic/api-description/src/components/schemas/ValidationRuleArray.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: object -description: An array of validation rules. -properties: - validation_rules: - description: An array of validation rules. - type: array - items: - $ref: ValidationRule.yaml diff --git a/libs/schematic/api-description/src/openapi.yaml b/libs/schematic/api-description/src/openapi.yaml deleted file mode 100644 index cf1b9da3e..000000000 --- a/libs/schematic/api-description/src/openapi.yaml +++ /dev/null @@ -1,129 +0,0 @@ -openapi: 3.0.3 -# 1) Define the security scheme type (HTTP bearer) -components: - securitySchemes: - bearerAuth: # arbitrary name for the security scheme - type: http - scheme: bearer - bearerFormat: JWT # optional, arbitrary value for documentation purposes -info: - title: Schematic REST API - version: 0.1.0 - license: - name: Apache 2.0 - url: https://github.com/Sage-Bionetworks/sage-monorepo - contact: - name: Support - url: https://github.com/Sage-Bionetworks/sage-monorepo - x-logo: - url: https://Sage-Bionetworks.github.io/rocc-schemas/logo.png -servers: - - url: http://localhost/api/v1 -tags: - - name: Storage - description: Operations about storages. 
-paths: - /schematicVersion: - $ref: paths/schematicVersion.yaml - - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataArray: - $ref: paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataArray.yaml - - /assetTypes/{assetType}/projects/{projectId}/datasetMetadataPage: - $ref: paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataPage.yaml - - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataArray: - $ref: paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataArray.yaml - - /assetTypes/{assetType}/projects/{projectId}/manifestMetadataPage: - $ref: paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataPage.yaml - - /assetTypes/{assetType}/assetViews/{assetViewId}/json: - $ref: paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/json.yaml - - /assetTypes/{assetType}/assetViews/{assetViewId}/csv: - $ref: paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/csv.yaml - - /assetTypes/{assetType}/datasets/{datasetId}/manifestJson: - $ref: paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestJson.yaml - - /assetTypes/{assetType}/datasets/{datasetId}/manifestCsv: - $ref: paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestCsv.yaml - - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataArray: - $ref: paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataArray.yaml - - /assetTypes/{assetType}/assetViews/{assetViewId}/projectMetadataPage: - $ref: paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataPage.yaml - - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataArray: - $ref: paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataArray.yaml - - /assetTypes/{assetType}/datasets/{datasetId}/fileMetadataPage: - $ref: paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataPage.yaml - - /assetTypes/{assetType}/manifests/{manifestId}/json: - $ref: paths/assetTypes/@{assetType}/manifests/@{manifestId}/json.yaml - - /assetTypes/{assetType}/manifests/{manifestId}/csv: - $ref: paths/assetTypes/@{assetType}/manifests/@{manifestId}/csv.yaml - - /nodes/{nodeLabel}/dependencyArray: - $ref: paths/nodes/@{nodeLabel}/dependencyArray.yaml - - /nodes/{nodeLabel}/dependencyPage: - $ref: paths/nodes/@{nodeLabel}/dependencyPage.yaml - - /nodes/{nodeDisplay}/isRequired: - $ref: paths/nodes/@{nodeDisplay}/isRequired.yaml - - /nodes/{nodeLabel}/nodeProperties: - $ref: paths/nodes/@{nodeLabel}/nodeProperties.yaml - - /nodes/{nodeDisplay}/propertyLabel: - $ref: paths/nodes/@{nodeDisplay}/propertyLabel.yaml - - /nodes/{nodeDisplay}/validationRules: - $ref: paths/nodes/@{nodeDisplay}/validationRules.yaml - - /components/{componentLabel}/: - $ref: paths/components/@{componentLabel}/component.yaml - - /components/{componentLabel}/requirementsArray: - $ref: paths/components/@{componentLabel}/requirementsArray.yaml - - /components/{componentLabel}/requirementsGraph: - $ref: paths/components/@{componentLabel}/requirementsGraph.yaml - - /schemaAttributes: - $ref: paths/schemaAttributes.yaml - - /connectedNodePairArray: - $ref: paths/connectedNodePairArray.yaml - - /connectedNodePairPage: - $ref: paths/connectedNodePairPage.yaml - - /validateManifestJson: - $ref: paths/validateManifestJson.yaml - - /validateManifestCsv: - $ref: paths/validateManifestCsv.yaml - - /submitManifestJson: - $ref: paths/submitManifestJson.yaml - - /submitManifestCsv: - $ref: paths/submitManifestCsv.yaml - - /tangledTreeLayers: - $ref: paths/tangledTreeLayers.yaml - - /tangledTreeText: - $ref: 
paths/tangledTreeText.yaml - - /generateGoogleSheetManifests: - $ref: paths/generateGoogleSheetManifests.yaml - - /generateExcelManifest: - $ref: paths/generateExcelManifest.yaml diff --git a/libs/schematic/api-description/src/paths/README.md b/libs/schematic/api-description/src/paths/README.md deleted file mode 100644 index 9ec938682..000000000 --- a/libs/schematic/api-description/src/paths/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# Paths - -Organize your path definitions within this folder. You will reference your paths from your main `openapi.yaml` entrypoint file. - -It may help you to adopt some conventions: - -- path separator token (e.g. `@`) or subfolders -- path parameter (e.g. `{example}`) -- file-per-path or file-per-operation - -There are different benefits and drawbacks to each decision. - -You can adopt any organization you wish. We have some tips for organizing paths based on common practices. - -## Each path in a separate file - -Use a predefined "path separator" and keep all of your path files in the top level of the `paths` folder. - -``` -# todo: insert tree view of paths folder -``` - -Redocly recommends using the `@` character for this case. - -In addition, Redocly recommends placing path parameters within `{}` curly braces if you adopt this style. - -#### Motivations - -- Quickly see a list of all paths. Many people think in terms of the "number" of "endpoints" (paths), and not the "number" of "operations" (paths \* http methods). - -- Only the "file-per-path" option is semantically correct with the OpenAPI Specification 3.0.2. However, Redocly's openapi-cli will build valid bundles for any of the other options too. - -#### Drawbacks - -- This may require multiple definitions per http method within a single file. -- It requires settling on a path separator (that is allowed to be used in filenames) and sticking to that convention. - -## Each operation in a separate file - -You may also place each operation in a separate file. - -In this case, if you want all paths at the top-level, you can concatenate the http method to the path name. Similar to the above option, you can - -### Files at top-level of `paths` - -You may name your files with some concatenation for the http method. For example, following a convention such as: `-.yaml`. - -#### Motivations - -- Quickly see all operations without needing to navigate subfolders. - -#### Drawbacks - -- Adopting an unusual path separator convention, instead of using subfolders. - -### Use subfolders to mirror API path structure - -Example: - -``` -GET /customers - -/paths/customers/get.yaml -``` - -In this case, the path id defined within subfolders which mirror the API URL structure. - -Example with path parameter: - -``` -GET /customers/{id} - -/paths/customers/{id}/get.yaml -``` - -#### Motivations - -It matches the URL structure. - -It is pretty easy to reference: - -```yaml -paths: - '/customers/{id}': - get: - $ref: ./paths/customers/{id}/get.yaml - put: - $ref: ./paths/customers/{id}/put.yaml -``` - -#### Drawbacks - -If you have a lot of nested folders, it may be confusing to reference your schemas. - -Example - -``` -file: /paths/customers/{id}/timeline/{messageId}/get.yaml - -# excerpt of file - headers: - Rate-Limit-Remaining: - $ref: ../../../../../components/headers/Rate-Limit-Remaining.yaml - -``` - -Notice the `../../../../../` in the ref which requires some attention to formulate correctly. 
While openapi-cli has a linter which suggests possible refs when there is a mistake, this is still a net drawback for APIs with deep paths. diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/csv.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/csv.yaml deleted file mode 100644 index fa16459bb..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/csv.yaml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetViewId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets the asset view table in csv file form - description: Gets the asset view table in csv file form - operationId: getAssetViewCsv - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/json.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/json.yaml deleted file mode 100644 index 9e6a71f14..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/json.yaml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetViewId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets the asset view table in json form - description: Gets the asset view table in json form - operationId: getAssetViewJson - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/AssetViewJson.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataArray.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataArray.yaml deleted file mode 100644 index 1ec819940..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataArray.yaml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetViewId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all storage projects the current user has access to. - description: Gets all storage projects the current user has access to. 
- operationId: getProjectMetadataArray - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/ProjectMetadataArray.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataPage.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataPage.yaml deleted file mode 100644 index 919476deb..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/assetViews/@{assetViewId}/projectMetadataPage.yaml +++ /dev/null @@ -1,31 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetViewId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all storage projects the current user has access to. - description: Gets all storage projects the current user has access to. - operationId: getProjectMetadataPage - parameters: - - $ref: ../../../../../components/parameters/query/pageNumber.yaml - - $ref: ../../../../../components/parameters/query/pageMaxItems.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/ProjectMetadataPage.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataArray.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataArray.yaml deleted file mode 100644 index aa6662461..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataArray.yaml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/datasetId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all files associated with a dataset. - description: Gets all files associated with a dataset. 
- operationId: getDatasetFileMetadataArray - parameters: - - $ref: ../../../../../components/parameters/query/fileNames.yaml - - $ref: ../../../../../components/parameters/query/useFullFilePath.yaml - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/FileMetadataArray.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataPage.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataPage.yaml deleted file mode 100644 index bff550c54..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/fileMetadataPage.yaml +++ /dev/null @@ -1,34 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/datasetId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all files associated with a dataset. - description: Gets all files associated with a dataset. - operationId: getDatasetFileMetadataPage - parameters: - - $ref: ../../../../../components/parameters/query/fileNames.yaml - - $ref: ../../../../../components/parameters/query/useFullFilePath.yaml - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - - $ref: ../../../../../components/parameters/query/pageNumber.yaml - - $ref: ../../../../../components/parameters/query/pageMaxItems.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/FileMetadataPage.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestCsv.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestCsv.yaml deleted file mode 100644 index 98bc40089..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestCsv.yaml +++ /dev/null @@ -1,30 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetType.yaml - - $ref: ../../../../../components/parameters/path/datasetId.yaml -get: - tags: - - Storage - summary: Gets the manifest in csv form - description: Gets the manifest in csv form - operationId: getDatasetManifestCsv - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - 
$ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestJson.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestJson.yaml deleted file mode 100644 index c4e00e1d3..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/datasets/@{datasetId}/manifestJson.yaml +++ /dev/null @@ -1,30 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetType.yaml - - $ref: ../../../../../components/parameters/path/datasetId.yaml -get: - tags: - - Storage - summary: Gets the manifest in json form - description: Gets the manifest in json form - operationId: getDatasetManifestJson - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/ManifestJson.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/csv.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/csv.yaml deleted file mode 100644 index 67ee987e0..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/csv.yaml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetType.yaml - - $ref: ../../../../../components/parameters/path/manifestId.yaml -get: - tags: - - Storage - summary: Gets the manifest in csv form - description: Gets the manifest in csv form - operationId: getManifestCsv - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - text/csv: - schema: - type: string - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/json.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/json.yaml deleted file mode 100644 index da61e3349..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/manifests/@{manifestId}/json.yaml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/assetType.yaml - - $ref: ../../../../../components/parameters/path/manifestId.yaml -get: - tags: - - Storage - summary: Gets the manifest in json form - description: Gets the manifest in json form - operationId: getManifestJson - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: 
../../../../../components/schemas/ManifestJson.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataArray.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataArray.yaml deleted file mode 100644 index 3b3129d32..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataArray.yaml +++ /dev/null @@ -1,30 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/projectId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all dataset metadata in folder under a given storage project that the current user has access to. - description: Gets all dataset meatdata in folder under a given storage project that the current user has access to. - operationId: getProjectDatasetMetadataArray - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/DatasetMetadataArray.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataPage.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataPage.yaml deleted file mode 100644 index 3eb10177b..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/datasetMetadataPage.yaml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/projectId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets a page of dataset metadata in folder under a given storage project that the current user has access to. - description: Gets a page of dataset meatdata in folder under a given storage project that the current user has access to. 
- operationId: getProjectDatasetMetadataPage - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - - $ref: ../../../../../components/parameters/query/pageNumber.yaml - - $ref: ../../../../../components/parameters/query/pageMaxItems.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/DatasetMetadataPage.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataArray.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataArray.yaml deleted file mode 100644 index 8507a8090..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataArray.yaml +++ /dev/null @@ -1,30 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/projectId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all manifests in a project folder that users have access to - description: Gets all manifests in a project folder that the current user has access to. - operationId: getProjectManifestMetadataArray - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/ManifestMetadataArray.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataPage.yaml b/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataPage.yaml deleted file mode 100644 index 0804995f2..000000000 --- a/libs/schematic/api-description/src/paths/assetTypes/@{assetType}/projects/@{projectId}/manifestMetadataPage.yaml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - - $ref: ../../../../../components/parameters/path/projectId.yaml - - $ref: ../../../../../components/parameters/path/assetType.yaml -get: - tags: - - Storage - summary: Gets all manifests in a project folder that users have access to - description: Gets all manifests in a project folder that the current user has access to. 
- operationId: getProjectManifestMetadataPage - parameters: - - $ref: ../../../../../components/parameters/query/assetViewIdQuery.yaml - - $ref: ../../../../../components/parameters/query/pageNumber.yaml - - $ref: ../../../../../components/parameters/query/pageMaxItems.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../../../components/schemas/ManifestMetadataPage.yaml - '400': - $ref: ../../../../../components/responses/BadRequest.yaml - '401': - $ref: ../../../../../components/responses/Unauthorized.yaml - '403': - $ref: ../../../../../components/responses/Unauthorized.yaml - '404': - $ref: ../../../../../components/responses/NotFound.yaml - '500': - $ref: ../../../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/components/@{componentLabel}/component.yaml b/libs/schematic/api-description/src/paths/components/@{componentLabel}/component.yaml deleted file mode 100644 index c35493c2e..000000000 --- a/libs/schematic/api-description/src/paths/components/@{componentLabel}/component.yaml +++ /dev/null @@ -1,22 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/componentLabel.yaml -get: - tags: - - Schema - summary: Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). - description: Get all the attributes associated with a specific data model component formatted as a dataframe (stored as a JSON String). - operationId: getComponent - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/includeIndex.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - description: The component as a json string - type: string - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsArray.yaml b/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsArray.yaml deleted file mode 100644 index 2605d9133..000000000 --- a/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsArray.yaml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/componentLabel.yaml -get: - tags: - - Schema - summary: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in array form. - description: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in array form. 
- operationId: getComponentRequirementsArray - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/ComponentRequirementArray.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsGraph.yaml b/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsGraph.yaml deleted file mode 100644 index 50fb97406..000000000 --- a/libs/schematic/api-description/src/paths/components/@{componentLabel}/requirementsGraph.yaml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/componentLabel.yaml -get: - tags: - - Schema - summary: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in graph form. - description: Given a source model component (see https://w3id.org/biolink/vocab/category for definnition of component), return all components required by it in graph form. - operationId: getComponentRequirementsGraph - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/ComponentRequirementGraph.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/connectedNodePairArray.yaml b/libs/schematic/api-description/src/paths/connectedNodePairArray.yaml deleted file mode 100644 index 03bac686b..000000000 --- a/libs/schematic/api-description/src/paths/connectedNodePairArray.yaml +++ /dev/null @@ -1,19 +0,0 @@ -get: - tags: - - Schema - summary: Gets an array of connected node pairs - description: Gets a array of connected node pairs - operationId: getConnectedNodePairArray - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/relationshipType.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ConnectedNodePairArray.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/connectedNodePairPage.yaml b/libs/schematic/api-description/src/paths/connectedNodePairPage.yaml deleted file mode 100644 index 86aac51ee..000000000 --- a/libs/schematic/api-description/src/paths/connectedNodePairPage.yaml +++ /dev/null @@ -1,21 +0,0 @@ -get: - tags: - - Schema - summary: Gets a page of connected node pairs - description: Gets a page of connected node pairs - operationId: getConnectedNodePairPage - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/relationshipType.yaml - - $ref: ../components/parameters/query/pageNumber.yaml - - $ref: ../components/parameters/query/pageMaxItems.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ConnectedNodePairPage.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git 
a/libs/schematic/api-description/src/paths/generateExcelManifest.yaml b/libs/schematic/api-description/src/paths/generateExcelManifest.yaml deleted file mode 100644 index 818b82044..000000000 --- a/libs/schematic/api-description/src/paths/generateExcelManifest.yaml +++ /dev/null @@ -1,41 +0,0 @@ -get: - tags: - - ManifestGeneration - summary: Generates an excel file - description: Generates an excel file - operationId: generateExcelManifest - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/addAnnotations.yaml - - $ref: ../components/parameters/query/manifestTitle.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - - name: datasetId - in: query - description: The ID of a dataset. - required: false - schema: - $ref: ../components/schemas/DatasetId.yaml - - name: dataType - in: query - description: A data type - required: true - schema: - $ref: ../components/schemas/DataType.yaml - - name: assetViewId - in: query - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: false - schema: - $ref: ../components/schemas/AssetViewId.yaml - security: - - bearerAuth: [] - responses: - '200': - description: 'Success' - content: - application/vnd.ms-excel: - schema: - type: string - format: binary - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/generateGoogleSheetManifests.yaml b/libs/schematic/api-description/src/paths/generateGoogleSheetManifests.yaml deleted file mode 100644 index 145b35261..000000000 --- a/libs/schematic/api-description/src/paths/generateGoogleSheetManifests.yaml +++ /dev/null @@ -1,50 +0,0 @@ -get: - tags: - - ManifestGeneration - summary: Generates a list of google sheet links - description: Generates a list of google sheet links - operationId: generateGoogleSheetManifests - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/addAnnotations.yaml - - $ref: ../components/parameters/query/manifestTitle.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - - $ref: ../components/parameters/query/useStrictValidation.yaml - - name: datasetIdArray - in: query - description: An array of dataset ids - required: false - schema: - $ref: ../components/schemas/DatasetIdArray.yaml - - name: dataTypeArray - in: query - description: An array of data types - required: false - schema: - $ref: ../components/schemas/DataTypeArray.yaml - - name: assetViewId - in: query - description: ID of view listing all project data assets. E.g. for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project - required: false - schema: - $ref: ../components/schemas/AssetViewId.yaml - - name: generateAllManifests - in: query - description: - If true, a manifest for all components will be generated, datasetIds will be ignored. - If false, manifests for each id in datasetIds will be generated. 
- required: false - schema: - type: boolean - default: false - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/GoogleSheetLinks.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/isRequired.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/isRequired.yaml deleted file mode 100644 index 6b5ab7b4d..000000000 --- a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/isRequired.yaml +++ /dev/null @@ -1,21 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeDisplay.yaml -get: - tags: - - Schema - summary: Gets whether or not the node is required in the schema - description: Gets whether or not the node is required in the schema - operationId: getNodeIsRequired - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - description: Whether or not the node is required in the schema - type: boolean - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/propertyLabel.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/propertyLabel.yaml deleted file mode 100644 index 05fdd71b5..000000000 --- a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/propertyLabel.yaml +++ /dev/null @@ -1,19 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeDisplay.yaml -get: - tags: - - Schema - summary: Gets the property label of the node - description: Gets the property label of the node - operationId: getPropertyLabel - parameters: - - $ref: ../../../components/parameters/query/useStrictCamelCase.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/PropertyLabel.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/validationRules.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/validationRules.yaml deleted file mode 100644 index 5e39d0a08..000000000 --- a/libs/schematic/api-description/src/paths/nodes/@{nodeDisplay}/validationRules.yaml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeDisplay.yaml -get: - tags: - - Schema - summary: Gets the validation rules, along with the arguments for each given rule associated with a given node - description: Gets the validation rules, along with the arguments for each given rule associated with a given node - operationId: getNodeValidationRules - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/ValidationRuleArray.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyArray.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyArray.yaml deleted file mode 100644 index 8ef75b301..000000000 --- 
a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyArray.yaml +++ /dev/null @@ -1,22 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeLabel.yaml -get: - tags: - - Schema - summary: Gets the immediate dependencies that are related to the given source node - description: Gets the immediate dependencies that are related to the given source node - operationId: getNodeDependencyArray - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/returnDisplayNames.yaml - - $ref: ../../../components/parameters/query/returnOrderedBySchema.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/NodeArray.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyPage.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyPage.yaml deleted file mode 100644 index 93af14c64..000000000 --- a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/dependencyPage.yaml +++ /dev/null @@ -1,24 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeLabel.yaml -get: - tags: - - Schema - summary: Gets the immediate dependencies that are related to the given source node - description: Gets the immediate dependencies that are related to the given source node - operationId: getNodeDependencyPage - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/returnDisplayNames.yaml - - $ref: ../../../components/parameters/query/returnOrderedBySchema.yaml - - $ref: ../../../components/parameters/query/pageNumber.yaml - - $ref: ../../../components/parameters/query/pageMaxItems.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/NodePage.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/nodeProperties.yaml b/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/nodeProperties.yaml deleted file mode 100644 index a73262fe3..000000000 --- a/libs/schematic/api-description/src/paths/nodes/@{nodeLabel}/nodeProperties.yaml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: - - $ref: ../../../components/parameters/path/nodeLabel.yaml -get: - tags: - - Schema - summary: Gets properties associated with a given node - description: Gets properties associated with a given node - operationId: getNodeProperties - parameters: - - $ref: ../../../components/parameters/query/schemaUrl.yaml - - $ref: ../../../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../../../components/schemas/NodePropertyArray.yaml - '500': - $ref: ../../../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/schemaAttributes.yaml b/libs/schematic/api-description/src/paths/schemaAttributes.yaml deleted file mode 100644 index b92fdf8f9..000000000 --- a/libs/schematic/api-description/src/paths/schemaAttributes.yaml +++ /dev/null @@ -1,19 +0,0 @@ -get: - tags: - - Schema - summary: Get all the attributes associated with a data model formatted as a 
dataframe (stored as a JSON String). - description: Get all the attributes associated with a data model formatted as a dataframe (stored as a JSON String). - operationId: getSchemaAttributes - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - description: The schema as a json string - type: string - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/schematicVersion.yaml b/libs/schematic/api-description/src/paths/schematicVersion.yaml deleted file mode 100644 index 8d72b31c9..000000000 --- a/libs/schematic/api-description/src/paths/schematicVersion.yaml +++ /dev/null @@ -1,16 +0,0 @@ -get: - tags: - - Versions - summary: Gets the version of the schematic library currently used by the API - description: Gets the version of the schematic library currently used by the API - operationId: getSchematicVersion - responses: - '200': - description: Success - content: - application/json: - schema: - type: string - example: 'v21.1.1' - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/submitManifestCsv.yaml b/libs/schematic/api-description/src/paths/submitManifestCsv.yaml deleted file mode 100644 index 1591d09f5..000000000 --- a/libs/schematic/api-description/src/paths/submitManifestCsv.yaml +++ /dev/null @@ -1,37 +0,0 @@ -post: - tags: - - ManifestValidation - summary: Validates manifest in csv form, then submits it - description: Validates manifest in csv form, then submits it - requestBody: - description: .csv file - content: - application/csv: - schema: - type: string - format: binary - required: true - operationId: submitManifestCsv - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/component.yaml - - $ref: ../components/parameters/query/restrictRules.yaml - - $ref: ../components/parameters/query/datasetIdQuery.yaml - - $ref: ../components/parameters/query/storageMethod.yaml - - $ref: ../components/parameters/query/hideBlanks.yaml - - $ref: ../components/parameters/query/assetViewIdQuery.yaml - - $ref: ../components/parameters/query/tableManipulationMethod.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - - $ref: ../components/parameters/query/annotationKeyStyle.yaml - - $ref: ../components/parameters/query/tableColumnNameStyle.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ManifestId.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/submitManifestJson.yaml b/libs/schematic/api-description/src/paths/submitManifestJson.yaml deleted file mode 100644 index 7b56395b0..000000000 --- a/libs/schematic/api-description/src/paths/submitManifestJson.yaml +++ /dev/null @@ -1,35 +0,0 @@ -post: - tags: - - ManifestValidation - summary: Validates a manifest in json form, then submits it - description: Validates a manifest in json form, then submits it in csv form - requestBody: - description: A manifest in json form - content: - text/plain: - schema: - type: string - operationId: submitManifestJson - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/component.yaml - - $ref: 
../components/parameters/query/restrictRules.yaml - - $ref: ../components/parameters/query/datasetIdQuery.yaml - - $ref: ../components/parameters/query/storageMethod.yaml - - $ref: ../components/parameters/query/hideBlanks.yaml - - $ref: ../components/parameters/query/assetViewIdQuery.yaml - - $ref: ../components/parameters/query/tableManipulationMethod.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - - $ref: ../components/parameters/query/annotationKeyStyle.yaml - - $ref: ../components/parameters/query/tableColumnNameStyle.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ManifestId.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/tangledTreeLayers.yaml b/libs/schematic/api-description/src/paths/tangledTreeLayers.yaml deleted file mode 100644 index a46094d0c..000000000 --- a/libs/schematic/api-description/src/paths/tangledTreeLayers.yaml +++ /dev/null @@ -1,19 +0,0 @@ -get: - tags: - - TangledTree - summary: Get tangled tree node layers to display for a given data model and figure type - description: Get tangled tree node layers to display for a given data model and figure type - operationId: getTangledTreeLayers - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/figureType.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/TangledTreeLayers.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/tangledTreeText.yaml b/libs/schematic/api-description/src/paths/tangledTreeText.yaml deleted file mode 100644 index c1b16c92c..000000000 --- a/libs/schematic/api-description/src/paths/tangledTreeText.yaml +++ /dev/null @@ -1,20 +0,0 @@ -get: - tags: - - TangledTree - summary: Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type - description: Get tangled tree plain or highlighted text to display for a given data model, text formatting and figure type - operationId: getTangledTreeText - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/figureType.yaml - - $ref: ../components/parameters/query/textFormat.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/TangledTreeText.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/validateManifestCsv.yaml b/libs/schematic/api-description/src/paths/validateManifestCsv.yaml deleted file mode 100644 index d4820e356..000000000 --- a/libs/schematic/api-description/src/paths/validateManifestCsv.yaml +++ /dev/null @@ -1,30 +0,0 @@ -post: - tags: - - ManifestValidation - summary: Validates a manifest in csv form - description: Validates a manifest in csv form - requestBody: - description: .csv file - content: - application/csv: - schema: - type: string - format: binary - required: true - operationId: validateManifestCsv - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/componentLabelQuery.yaml - - $ref: 
../components/parameters/query/restrictRules.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - security: - - bearerAuth: [] - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ManifestValidationResult.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/src/paths/validateManifestJson.yaml b/libs/schematic/api-description/src/paths/validateManifestJson.yaml deleted file mode 100644 index ed41af21a..000000000 --- a/libs/schematic/api-description/src/paths/validateManifestJson.yaml +++ /dev/null @@ -1,26 +0,0 @@ -post: - tags: - - ManifestValidation - summary: Validates a manifest in json form - description: Validates a manifest in json form - requestBody: - description: A manifest in json form - content: - text/plain: - schema: - type: string - operationId: validateManifestJson - parameters: - - $ref: ../components/parameters/query/schemaUrl.yaml - - $ref: ../components/parameters/query/componentLabelQuery.yaml - - $ref: ../components/parameters/query/restrictRules.yaml - - $ref: ../components/parameters/query/displayLabelType.yaml - responses: - '200': - description: Success - content: - application/json: - schema: - $ref: ../components/schemas/ManifestValidationResult.yaml - '500': - $ref: ../components/responses/InternalServerError.yaml diff --git a/libs/schematic/api-description/tmp/output/openapi.yaml b/libs/schematic/api-description/tmp/output/openapi.yaml deleted file mode 100644 index f2d876d33..000000000 --- a/libs/schematic/api-description/tmp/output/openapi.yaml +++ /dev/null @@ -1,54 +0,0 @@ -openapi: 3.0.0 -info: - title: Schematic REST API - version: 0.1.0 - description: >- - This service exposes core functionalities from schematic as REST API - endpoints -servers: - - url: /v1 -paths: - /manifest/generate: - $ref: paths/manifest_generate.yaml - /manifest/download: - $ref: paths/manifest_download.yaml - /model/validate: - $ref: paths/model_validate.yaml - /model/submit: - $ref: paths/model_submit.yaml - /model/component-requirements: - $ref: paths/model_component-requirements.yaml - /manifest/populate: - $ref: paths/manifest_populate.yaml - /get/datatype/manifest: - $ref: paths/get_datatype_manifest.yaml - /storage/projects: - $ref: paths/storage_projects.yaml - /storage/project/datasets: - $ref: paths/storage_project_datasets.yaml - /storage/dataset/files: - $ref: paths/storage_dataset_files.yaml - /storage/assets/tables: - $ref: paths/storage_assets_tables.yaml - /storage/project/manifests: - $ref: paths/storage_project_manifests.yaml - /schemas/get/schema: - $ref: paths/schemas_get_schema.yaml - /explorer/find_class_specific_properties: - $ref: paths/explorer_find_class_specific_properties.yaml - /schemas/get/graph_by_edge_type: - $ref: paths/schemas_get_graph_by_edge_type.yaml - /schemas/is_node_required: - $ref: paths/schemas_is_node_required.yaml - /explorer/get_node_dependencies: - $ref: paths/explorer_get_node_dependencies.yaml - /explorer/get_property_label_from_display_name: - $ref: paths/explorer_get_property_label_from_display_name.yaml - /explorer/get_node_range: - $ref: paths/explorer_get_node_range.yaml - /visualize/tangled_tree/layers: - $ref: paths/visualize_tangled_tree_layers.yaml - /visualize/tangled_tree/text: - $ref: paths/visualize_tangled_tree_text.yaml - /visualize/attributes: - $ref: paths/visualize_attributes.yaml diff --git 
a/libs/schematic/api-description/tmp/output/paths/explorer_find_class_specific_properties.yaml b/libs/schematic/api-description/tmp/output/paths/explorer_find_class_specific_properties.yaml deleted file mode 100644 index 3e915d4c5..000000000 --- a/libs/schematic/api-description/tmp/output/paths/explorer_find_class_specific_properties.yaml +++ /dev/null @@ -1,28 +0,0 @@ -get: - summary: Find properties specifically associated with a given class - description: Find properties specifically associated with a given class - operationId: api.routes.find_class_specific_properties - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: schema_class - schema: - type: string - nullable: false - description: schema class - example: MolecularEntity - required: true - responses: - '200': - description: A list of properties of a given class. - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/explorer_get_node_dependencies.yaml b/libs/schematic/api-description/tmp/output/paths/explorer_get_node_dependencies.yaml deleted file mode 100644 index d884a1148..000000000 --- a/libs/schematic/api-description/tmp/output/paths/explorer_get_node_dependencies.yaml +++ /dev/null @@ -1,44 +0,0 @@ -get: - summary: Get the immediate dependencies that are related to a given source node - description: Get the immediate dependencies that are related to a given source node - operationId: api.routes.get_node_dependencies - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: source_node - schema: - type: string - nullable: false - description: The node whose dependencies are needed - example: Patient - required: true - - in: query - name: return_display_names - schema: - type: boolean - nullable: true - description: Return display names or not - required: false - example: true - - in: query - name: return_schema_ordered - schema: - type: boolean - nullable: true - description: Return schema ordered or not - required: false - example: true - responses: - '200': - description: List of nodes that are dependent on the source node. - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/explorer_get_node_range.yaml b/libs/schematic/api-description/tmp/output/paths/explorer_get_node_range.yaml deleted file mode 100644 index 89eeb6d60..000000000 --- a/libs/schematic/api-description/tmp/output/paths/explorer_get_node_range.yaml +++ /dev/null @@ -1,34 +0,0 @@ -get: - summary: Get all the valid values that are associated with a node label. - description: Get all the valid values that are associated with a node label. - operationId: api.routes.get_node_range - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: node_label - schema: - type: string - nullable: false - description: Node / term for which you need to retrieve the range. 
- example: FamilyHistory - required: true - - in: query - name: return_display_names - schema: - type: boolean - description: If true returns the display names of the nodes. - required: false - responses: - '200': - description: A list of nodes. - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/explorer_get_property_label_from_display_name.yaml b/libs/schematic/api-description/tmp/output/paths/explorer_get_property_label_from_display_name.yaml deleted file mode 100644 index e2ceaa044..000000000 --- a/libs/schematic/api-description/tmp/output/paths/explorer_get_property_label_from_display_name.yaml +++ /dev/null @@ -1,34 +0,0 @@ -get: - summary: Converts a given display name string into a proper property label string - description: Converts a given display name string into a proper property label string - operationId: api.routes.get_property_label_from_display_name - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: display_name - schema: - type: string - nullable: false - description: The display name to be converted - example: MolecularEntity - required: true - - in: query - name: strict_camel_case - schema: - type: boolean - nullable: false - description: If true the more strict way of converting to camel case is used. - responses: - '200': - description: The property label of the display name. - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/get_datatype_manifest.yaml b/libs/schematic/api-description/tmp/output/paths/get_datatype_manifest.yaml deleted file mode 100644 index 2baeae6b8..000000000 --- a/libs/schematic/api-description/tmp/output/paths/get_datatype_manifest.yaml +++ /dev/null @@ -1,39 +0,0 @@ -get: - summary: Get datatype of attributes in manifest - description: Get datatype of attributes in manifest - operationId: api.routes.get_manifest_datatype - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: manifest_id - schema: - type: string - nullable: false - description: Manifest ID - example: syn27600110 - required: true - responses: - '200': - description: A list of json - '500': - description: Check schematic log. 
- tags: - - Manifest Operations diff --git a/libs/schematic/api-description/tmp/output/paths/manifest_download.yaml b/libs/schematic/api-description/tmp/output/paths/manifest_download.yaml deleted file mode 100644 index 3f0b74e2a..000000000 --- a/libs/schematic/api-description/tmp/output/paths/manifest_download.yaml +++ /dev/null @@ -1,57 +0,0 @@ -get: - summary: Endpoint to download an existing manifest - description: Endpoint to download an existing manifest - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn28559058 - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: true - description: this dataset_id should be the parent ID of the manifest. - example: syn28268700 - required: true - - in: query - name: as_json - schema: - type: boolean - default: false - description: if True return the manifest in JSON format - required: false - - in: query - name: new_manifest_name - schema: - type: string - nullable: true - description: Fill in if you want to change the filename of the downloaded manifest. - required: false - operationId: api.routes.download_manifest - responses: - '200': - description: >- - A manifest gets downloaded and local file path of the manifest gets - returned. - content: - text/csv: - schema: - type: string - tags: - - Manifest Operations diff --git a/libs/schematic/api-description/tmp/output/paths/manifest_generate.yaml b/libs/schematic/api-description/tmp/output/paths/manifest_generate.yaml deleted file mode 100644 index 902bedde1..000000000 --- a/libs/schematic/api-description/tmp/output/paths/manifest_generate.yaml +++ /dev/null @@ -1,95 +0,0 @@ -get: - summary: Endpoint to facilitate manifest generation - description: Endpoint to create dynamically create metadata manifest files - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: title - schema: - type: string - description: | - Title of Manifest or Title Prefix, if making multiple manifests - example: Example - required: false - - in: query - name: data_type - style: form - schema: - type: array - items: - type: string - nullable: true - description: | - Data Model Component(s). To make all manifests, enter "all manifests". - example: - - Patient - - Biospecimen - required: true - - in: query - name: use_annotations - schema: - type: boolean - default: false - description: To Use Annotations - required: false - - in: query - name: dataset_id - style: form - schema: - type: array - items: - type: string - nullable: true - description: > - Dataset ID. If you want to get an existing manifest, this dataset_id - should be the parent ID of the manifest. Can enter multiple dataset_ids, - corresponding to order of multiple data_types entered above. Do not - enter multiple if calling 'all manifests' for data_type. - required: false - - in: query - name: asset_view - schema: - type: string - nullable: true - description: >- - ID of view listing all project data assets. E.g. 
for Synapse this would - be the Synapse ID of the fileview listing all data assets for a given - project.(i.e. master_fileview in config.yml) - required: false - - in: query - name: output_format - schema: - type: string - enum: - - excel - - google_sheet - - dataframe (only if getting existing manifests) - description: >- - If "excel" gets selected, this approach would avoid sending metadata to - Google sheet APIs; if "google_sheet" gets selected, this would return a - Google sheet URL. This parameter could potentially override sheet_url - parameter. - required: false - operationId: api.routes.get_manifest_route - responses: - '200': - description: >- - Googlesheet link created OR an excel file gets returned OR pandas - dataframe gets returned - content: - application/vnd.ms-excel: - schema: - type: string - format: binary - application/json: - schema: - type: string - tags: - - Manifest Operations diff --git a/libs/schematic/api-description/tmp/output/paths/manifest_populate.yaml b/libs/schematic/api-description/tmp/output/paths/manifest_populate.yaml deleted file mode 100644 index 270979925..000000000 --- a/libs/schematic/api-description/tmp/output/paths/manifest_populate.yaml +++ /dev/null @@ -1,57 +0,0 @@ -post: - summary: Create a Google sheet link based on an existing manifest. - description: Create a Google sheet link based on an existing manifest. - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - csv_file: - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: title - schema: - type: string - description: Title of Manifest - example: Example - required: false - - in: query - name: return_excel - schema: - type: boolean - nullable: true - description: >- - If true, this would return an Excel spreadsheet.(This approach would - avoid sending metadata to Google sheet APIs) - required: false - operationId: api.routes.populate_manifest_route - responses: - '200': - description: Googlesheet link created - content: - application/json: - schema: - type: string - '500': - description: Check schematic log - tags: - - Manifest Operations diff --git a/libs/schematic/api-description/tmp/output/paths/model_component-requirements.yaml b/libs/schematic/api-description/tmp/output/paths/model_component-requirements.yaml deleted file mode 100644 index 5d0c2b29a..000000000 --- a/libs/schematic/api-description/tmp/output/paths/model_component-requirements.yaml +++ /dev/null @@ -1,57 +0,0 @@ -get: - summary: >- - Given a source model component (see https://w3id.org/biolink/vocab/category - for definnition of component), return all components required by it. - description: >- - Given a source model component (see https://w3id.org/biolink/vocab/category - for definnition of component), return all components required by it. Useful - to construct requirement dependencies not only between specific attributes - but also between categories/components of attributes; it can be utilized to - track metadata completion progress across multiple categories of attributes. 
- parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: source_component - schema: - type: string - description: >- - an attribute label indicating the source component. (i.e. Patient, - Biospecimen, ScRNA-seqLevel1, ScRNA-seqLevel2) - example: Biospecimen - required: true - - in: query - name: as_graph - schema: - type: boolean - default: false - description: >- - if False return component requirements as a list; if True return - component requirements as a dependency graph (i.e. a DAG) - required: true - operationId: api.routes.get_component_requirements - responses: - '200': - description: A list of required components associated with the source component. - content: - application/json: - schema: - type: array - example: - - MolecularTest - - Therapy - - Diagnosis - - FollowUp - - Exposure - - FamilyHistory - - Demographics - - Patient - - BreastCancerTier3 - tags: - - Manifest Operations diff --git a/libs/schematic/api-description/tmp/output/paths/model_submit.yaml b/libs/schematic/api-description/tmp/output/paths/model_submit.yaml deleted file mode 100644 index 97483ef65..000000000 --- a/libs/schematic/api-description/tmp/output/paths/model_submit.yaml +++ /dev/null @@ -1,100 +0,0 @@ -post: - summary: Endpoint to facilitate manifest submission - description: Endpoint to submit annotated manifest files - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - file_name: - description: Upload a json or a csv file. - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: true - description: Dataset SynID - required: true - - in: query - name: manifest_record_type - schema: - type: string - enum: - - table - - entity - - both - description: Manifest storage type. - example: table - - in: query - name: restrict_rules - schema: - type: boolean - default: false - description: >- - If True, validation suite will only run with in-house validation rule. - If False, the Great Expectations suite will be utilized and all rules - will be available. - required: true - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn28559058 - required: true - - in: query - name: json_str - required: false - schema: - type: string - nullable: false - description: A JSON object - example: >- - [{ "Patient ID": 123, "Sex": "Female", "Year of Birth": "", "Diagnosis": - "Healthy", "Component": "Patient", "Cancer Type": "Breast", "Family - History": "Breast, Lung", }] - operationId: api.routes.submit_manifest_route - responses: - '200': - description: >- - Manifest ID (e.g. 
Synapse ID if your asset management platform is - Synapse) - content: - application/json: - schema: - type: string - '500': - description: Check schematic log - tags: - - Model Operations diff --git a/libs/schematic/api-description/tmp/output/paths/model_validate.yaml b/libs/schematic/api-description/tmp/output/paths/model_validate.yaml deleted file mode 100644 index e1e78e433..000000000 --- a/libs/schematic/api-description/tmp/output/paths/model_validate.yaml +++ /dev/null @@ -1,60 +0,0 @@ -post: - summary: Endpoint to facilitate manifest validation - description: Endpoint to validate metadata manifest files - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - file_name: - description: Upload a json or a csv file. - type: string - format: binary - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: data_type - schema: - type: string - nullable: true - description: Data Model Component - example: Patient - required: true - - in: query - name: json_str - required: false - schema: - type: string - nullable: false - description: A JSON object - example: >- - [{ "Patient ID": 123, "Sex": "Female", "Year of Birth": "", "Diagnosis": - "Healthy", "Component": "Patient", "Cancer Type": "Breast", "Family - History": "Breast, Lung", }] - operationId: api.routes.validate_manifest_route - responses: - '200': - description: Manifest Validated - content: - application/json: - schema: - type: array - items: - type: array - items: - anyOf: - - type: integer - - type: string - - type: array - items: - type: string - tags: - - Model Operations diff --git a/libs/schematic/api-description/tmp/output/paths/schemas_get_graph_by_edge_type.yaml b/libs/schematic/api-description/tmp/output/paths/schemas_get_graph_by_edge_type.yaml deleted file mode 100644 index 7e9b2082a..000000000 --- a/libs/schematic/api-description/tmp/output/paths/schemas_get_graph_by_edge_type.yaml +++ /dev/null @@ -1,41 +0,0 @@ -get: - summary: Get a subgraph containing all edges of a given type (aka relationship) - description: Get a subgraph containing all edges of a given type (aka relationship) - operationId: api.routes.get_subgraph_by_edge_type - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: relationship - schema: - type: string - nullable: false - description: >- - Relationship (i.e. parentOf, requiresDependency, rangeValue, - domainValue) - example: requiresDependency - required: true - responses: - '200': - description: A list of tuples. - content: - application/json: - schema: - type: array - example: - - - - Patient - - PatientID - - - Patient - - Sex - - - Patient - - YearofBirth - '500': - description: Check schematic log. 
- tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/schemas_get_schema.yaml b/libs/schematic/api-description/tmp/output/paths/schemas_get_schema.yaml deleted file mode 100644 index 1f4f4ae07..000000000 --- a/libs/schematic/api-description/tmp/output/paths/schemas_get_schema.yaml +++ /dev/null @@ -1,26 +0,0 @@ -get: - summary: Return schema as a pickle file - description: Return schema as a pickle file - operationId: api.routes.get_schema_pickle - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - responses: - '200': - description: >- - A pickle file gets downloaded and local file path of the pickle file - gets returned. - content: - text/plain: - schema: - type: string - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/schemas_is_node_required.yaml b/libs/schematic/api-description/tmp/output/paths/schemas_is_node_required.yaml deleted file mode 100644 index e2ef7d96b..000000000 --- a/libs/schematic/api-description/tmp/output/paths/schemas_is_node_required.yaml +++ /dev/null @@ -1,28 +0,0 @@ -get: - summary: Check if a node is required or not - description: Check if a node is required or not - operationId: api.routes.get_if_node_required - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: node_display_name - schema: - type: string - nullable: false - description: Display label of a node - example: FamilyHistory - required: true - responses: - '200': - description: return a boolean - '500': - description: Check schematic log. - tags: - - Schema Operation diff --git a/libs/schematic/api-description/tmp/output/paths/storage_assets_tables.yaml b/libs/schematic/api-description/tmp/output/paths/storage_assets_tables.yaml deleted file mode 100644 index 30b754093..000000000 --- a/libs/schematic/api-description/tmp/output/paths/storage_assets_tables.yaml +++ /dev/null @@ -1,41 +0,0 @@ -get: - summary: Retrieve asset view table as a dataframe. - description: Retrieve asset view table as a dataframe. - operationId: api.routes.get_asset_view_table - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: return_type - schema: - type: string - enum: - - json - - csv - description: Type of return - example: json - required: true - responses: - '200': - description: csv file path or json - '500': - description: Check schematic log. 
- tags: - - Synapse Storage diff --git a/libs/schematic/api-description/tmp/output/paths/storage_dataset_files.yaml b/libs/schematic/api-description/tmp/output/paths/storage_dataset_files.yaml deleted file mode 100644 index 63aed0870..000000000 --- a/libs/schematic/api-description/tmp/output/paths/storage_dataset_files.yaml +++ /dev/null @@ -1,59 +0,0 @@ -get: - summary: Get all files in a given dataset folder - description: Get all files in a given dataset folder - operationId: api.routes.get_files_storage_dataset - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: dataset_id - schema: - type: string - nullable: false - description: synapse ID of a storage dataset. - example: syn23643250 - required: true - - in: query - name: file_names - schema: - type: array - items: - type: string - nullable: true - description: >- - a list of files with particular names (i.e. Sample_A.txt). If you leave - it empty, it will return all dataset files under the dataset ID. - required: false - - in: query - name: full_path - schema: - type: boolean - nullable: false - description: >- - if True return the full path as part of this filename; otherwise return - just base filename - required: true - responses: - '200': - description: A list of tuples - '500': - description: Check schematic log - tags: - - Synapse Storage diff --git a/libs/schematic/api-description/tmp/output/paths/storage_project_datasets.yaml b/libs/schematic/api-description/tmp/output/paths/storage_project_datasets.yaml deleted file mode 100644 index 3efb0453c..000000000 --- a/libs/schematic/api-description/tmp/output/paths/storage_project_datasets.yaml +++ /dev/null @@ -1,43 +0,0 @@ -get: - summary: >- - Gets all datasets in folder under a given storage project that the current - user has access to. - description: >- - Gets all datasets in folder under a given storage project that the current - user has access to. - operationId: api.routes.get_storage_projects_datasets - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - - in: query - name: project_id - schema: - type: string - nullable: false - description: synapse ID of a storage project. 
- example: syn26251192 - required: true - responses: - '200': - description: A list of tuples - '500': - description: Check log - tags: - - Synapse Storage diff --git a/libs/schematic/api-description/tmp/output/paths/storage_project_manifests.yaml b/libs/schematic/api-description/tmp/output/paths/storage_project_manifests.yaml deleted file mode 100644 index 59b9295e3..000000000 --- a/libs/schematic/api-description/tmp/output/paths/storage_project_manifests.yaml +++ /dev/null @@ -1,50 +0,0 @@ -get: - summary: Gets all metadata manifest files across all datasets in a specified project. - description: Gets all metadata manifest files across all datasets in a specified project. - operationId: api.routes.get_project_manifests - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: project_id - schema: - type: string - nullable: false - description: Project ID - example: syn30988314 - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - responses: - '200': - description: A list of tuples(json). - content: - application/json: - schema: - type: array - example: - - - - datasetId - - dataName - - - manifestId - - manifestName - - - componentSchemaLabel - - componentSchemaLabel - '500': - description: Check schematic log. - tags: - - Synapse Storage diff --git a/libs/schematic/api-description/tmp/output/paths/storage_projects.yaml b/libs/schematic/api-description/tmp/output/paths/storage_projects.yaml deleted file mode 100644 index 559047f02..000000000 --- a/libs/schematic/api-description/tmp/output/paths/storage_projects.yaml +++ /dev/null @@ -1,33 +0,0 @@ -get: - summary: Get all storage projects the current user has access to - description: >- - Gets all storage projects the current user has access to, within the scope - of the 'storageFileview' attribute. - operationId: api.routes.get_storage_projects - parameters: - - in: query - name: input_token - schema: - type: string - nullable: false - description: Token - example: Token - required: true - - in: query - name: asset_view - schema: - type: string - nullable: false - description: >- - ID of view listing all project data assets. For example, for Synapse - this would be the Synapse ID of the fileview listing all data assets for - a given project.(i.e. master_fileview in config.yml) - example: syn23643253 - required: true - responses: - '200': - description: A list of tuples - '500': - description: Check log - tags: - - Synapse Storage diff --git a/libs/schematic/api-description/tmp/output/paths/visualize_attributes.yaml b/libs/schematic/api-description/tmp/output/paths/visualize_attributes.yaml deleted file mode 100644 index 9a3ea05cf..000000000 --- a/libs/schematic/api-description/tmp/output/paths/visualize_attributes.yaml +++ /dev/null @@ -1,24 +0,0 @@ -get: - summary: Get an attributes table for a data model, as a CSV (JSON String) - description: >- - Get all the attributes associated with a data model formatted as a dataframe - (stored as a JSON String) for use in Observable visualization. 
- operationId: api.routes.get_viz_attributes_explorer - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - responses: - '200': - description: Returns a CSV as a JSON String. - content: - text/csv: - schema: - type: string - tags: - - Visualization Operations diff --git a/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_layers.yaml b/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_layers.yaml deleted file mode 100644 index 66ee06148..000000000 --- a/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_layers.yaml +++ /dev/null @@ -1,34 +0,0 @@ -get: - summary: Get layers of tangled tree. - description: >- - Get tangled tree node layers to display for a given data model and figure - type - operationId: api.routes.get_viz_tangled_tree_layers - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: figure_type - schema: - type: string - enum: - - component - - dependency - description: Figure type to generate. - example: component - required: true - responses: - '200': - description: Returns a dataframe as a JSON String. - content: - text/json: - schema: - type: string - tags: - - Visualization Operations diff --git a/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_text.yaml b/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_text.yaml deleted file mode 100644 index 4469cd3d5..000000000 --- a/libs/schematic/api-description/tmp/output/paths/visualize_tangled_tree_text.yaml +++ /dev/null @@ -1,44 +0,0 @@ -get: - summary: Get text to display on tangled tree. - description: >- - Get tangled tree plain or higlighted text to display for a given data model, - text formatting and figure type - operationId: api.routes.get_viz_tangled_tree_text - parameters: - - in: query - name: schema_url - schema: - type: string - description: Data Model URL - example: >- - https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld - required: true - - in: query - name: figure_type - schema: - type: string - enum: - - component - - dependency - description: Figure type to generate. - example: component - required: true - - in: query - name: text_format - schema: - type: string - enum: - - plain - - highlighted - description: Text formatting type. - example: plain - required: true - responses: - '200': - description: Returns a dataframe as a JSON String. 
- content: - text/csv: - schema: - type: string - tags: - - Visualization Operations diff --git a/mkdocs.yml b/mkdocs.yml index a17f481b8..1958d71bb 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -48,8 +48,6 @@ nav: - OpenChallenges: - Microservices: reference/oc-microservices.md - Schemas: reference/oc-schemas.md - - Schematic: - - API: reference/schematic.md # Theme configuration theme: diff --git a/tools/configure-hostnames.sh b/tools/configure-hostnames.sh index 5141884ef..38b4ec147 100755 --- a/tools/configure-hostnames.sh +++ b/tools/configure-hostnames.sh @@ -24,7 +24,6 @@ declare -a hostnames=( "127.0.0.1 openchallenges-service-registry" "127.0.0.1 openchallenges-thumbor" "127.0.0.1 openchallenges-zipkin" - "127.0.0.1 schematic-api" ) # add hostnames diff --git a/tools/redocly/config.yaml b/tools/redocly/config.yaml index ce9501ca9..fb0592903 100644 --- a/tools/redocly/config.yaml +++ b/tools/redocly/config.yaml @@ -16,11 +16,6 @@ apis: root: libs/model-ad/api-description/build/openapi.yaml openchallenges-api-description: root: libs/openchallenges/api-description/build/openapi.yaml - schematic-api-description: - root: libs/schematic/api-description/build/openapi.yaml - rules: - operation-4xx-response: off - no-path-trailing-slash: off synapse-api-description: root: libs/synapse/api-description/build/openapi.json rules: From dfb84834c2ab4cebac781ab4d04ccb1a3011b6f5 Mon Sep 17 00:00:00 2001 From: sagely1 <114952739+sagely1@users.noreply.github.com> Date: Mon, 27 Jan 2025 11:14:58 -0800 Subject: [PATCH 05/10] feat(agora): update not found page design (AG-1590) (#2973) --- .../src/lib/not-found.component.html | 31 +----------- .../src/lib/not-found.component.scss | 3 +- .../not-found/src/lib/not-found.component.ts | 47 ++----------------- 3 files changed, 7 insertions(+), 74 deletions(-) diff --git a/libs/agora/not-found/src/lib/not-found.component.html b/libs/agora/not-found/src/lib/not-found.component.html index fedabd6b0..d18aa0a3a 100644 --- a/libs/agora/not-found/src/lib/not-found.component.html +++ b/libs/agora/not-found/src/lib/not-found.component.html @@ -1,31 +1,4 @@
[hunk body not recoverable: the template markup was stripped during extraction; only stray -/+ markers and the headings "We're sorry!" and "Page not found." — present in both the old 31-line template and the new 4-line template — survive]
- - - diff --git a/libs/agora/not-found/src/lib/not-found.component.scss b/libs/agora/not-found/src/lib/not-found.component.scss index 586ba6663..65ee84434 100644 --- a/libs/agora/not-found/src/lib/not-found.component.scss +++ b/libs/agora/not-found/src/lib/not-found.component.scss @@ -3,11 +3,12 @@ .page-not-found { min-height: calc(100vh - var(--header-height) - var(--footer-height) + 1px); - background-image: url('/agora-assets/images/page-not-found.svg'); + background-image: url('/agora-assets/images/hero-background.svg'); background-size: cover !important; display: flex; justify-content: center; align-items: center; + flex-direction: column; h1 { text-align: center; diff --git a/libs/agora/not-found/src/lib/not-found.component.ts b/libs/agora/not-found/src/lib/not-found.component.ts index 39abb1721..4442f29d7 100644 --- a/libs/agora/not-found/src/lib/not-found.component.ts +++ b/libs/agora/not-found/src/lib/not-found.component.ts @@ -1,50 +1,9 @@ -import { CommonModule } from '@angular/common'; -import { Component, OnInit, Renderer2 } from '@angular/core'; -import { MatButtonModule } from '@angular/material/button'; -import { RouterModule } from '@angular/router'; -import { MatCardModule } from '@angular/material/card'; -import { ConfigService } from '@sagebionetworks/agora/config'; -import { SeoService } from '@sagebionetworks/shared/util'; -import { DataversionService, Dataversion } from '@sagebionetworks/agora/api-client-angular'; -import { getSeoData } from './seo-data'; -import { Observable } from 'rxjs'; -// import { SynapseApiService } from '@sagebionetworks/agora/services'; -// import { SynapseWiki } from '@sagebionetworks/agora/models'; -// import { OrgSagebionetworksRepoModelWikiWikiPage } from '@sagebionetworks/synapse/api-client-angular'; +import { Component } from '@angular/core'; @Component({ selector: 'agora-not-found', - imports: [CommonModule, RouterModule, MatCardModule, MatButtonModule], + imports: [], templateUrl: './not-found.component.html', styleUrls: ['./not-found.component.scss'], }) -export class NotFoundComponent implements OnInit { - public appVersion: string; - public apiDocsUrl: string; - - dataversion$!: Observable; - // wiki$!: Observable; - // wikiAlternative$!: Observable; - - constructor( - private readonly configService: ConfigService, - private dataversionService: DataversionService, - private seoService: SeoService, - private renderer2: Renderer2, - // private synapseApiService: SynapseApiService, - ) { - this.appVersion = this.configService.config.appVersion; - this.apiDocsUrl = this.configService.config.apiDocsUrl; - - this.seoService.setData(getSeoData(), this.renderer2); - } - - ngOnInit(): void { - this.dataversion$ = this.dataversionService.getDataversion(); - - // const ownerId = 'syn25913473'; - // const wikiId = '612058'; - // this.wiki$ = this.synapseApiService.getWiki(ownerId, wikiId); - // this.wikiAlternative$ = this.synapseApiService.getWikiAlternative(ownerId, wikiId); - } -} +export class NotFoundComponent {} From c7e2cdbc88fb8cc4815dafedd92b105e4b9544f2 Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Mon, 27 Jan 2025 14:02:17 -0800 Subject: [PATCH 06/10] chore: fix critical CVEs in the dev container Docker image (SMR-1) (#2980) --- .github/.devcontainer/Dockerfile | 2 +- .github/.devcontainer/devcontainer.json | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/.devcontainer/Dockerfile b/.github/.devcontainer/Dockerfile index dff2521b9..8a3c36501 100644 --- 
a/.github/.devcontainer/Dockerfile +++ b/.github/.devcontainer/Dockerfile @@ -25,7 +25,7 @@ ARG nodeVersionMajor="22" # https://pypi.org/project/pipenv/ ARG pipenvVersion="2024.4.0" # https://github.com/pnpm/pnpm/releases -ARG pnpmVersion="9.15.2" +ARG pnpmVersion="9.15.4" # List of Python versions separated by spaces ARG pyenvPythonVersions="3.13.1" # https://github.com/SonarSource/sonar-scanner-cli/releases diff --git a/.github/.devcontainer/devcontainer.json b/.github/.devcontainer/devcontainer.json index 11f122da6..e39acaf3f 100644 --- a/.github/.devcontainer/devcontainer.json +++ b/.github/.devcontainer/devcontainer.json @@ -10,16 +10,17 @@ }, "features": { "ghcr.io/devcontainers/features/docker-in-docker:2.12.0": { - "version": "27.4.1" + "version": "27.5.1", + "installDockerComposeSwitch": false }, "ghcr.io/devcontainers/features/go:1.3.1": { - "version": "1.23", + "version": "1.23.5", "golangciLintVersion": "1.63.4" }, "ghcr.io/devcontainers/features/kubectl-helm-minikube:1.2.0": { - "version": "1.32", - "helm": "3.16.4", - "minikube": "1.34.0" + "version": "1.32.1", + "helm": "3.17.0", + "minikube": "1.35.0" } }, "remoteUser": "ubuntu", From fdb0cbb1978da7c0d3182cd2ac72eb85915df67a Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Mon, 27 Jan 2025 14:33:14 -0800 Subject: [PATCH 07/10] chore: create workspace-nuke script (SMR-3) (#2981) --- dev-env.sh | 3 ++- tools/workspace-nuke.sh | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) create mode 100755 tools/workspace-nuke.sh diff --git a/dev-env.sh b/dev-env.sh index 680099167..6e03e9a1b 100644 --- a/dev-env.sh +++ b/dev-env.sh @@ -34,7 +34,8 @@ function workspace-install { workspace-install-python-dependencies nx run-many --target=create-config nx run-many --target=prepare --projects=tag:language:java --parallel=1 - nx run-many --target=prepare --projects=tag:language:python --projects=tag:language:r + nx run-many --target=prepare --projects=tag:language:python --parallel=1 + nx run-many --target=prepare --projects=tag:language:r } function workspace-install-affected { diff --git a/tools/workspace-nuke.sh b/tools/workspace-nuke.sh new file mode 100755 index 000000000..c3221f1cd --- /dev/null +++ b/tools/workspace-nuke.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +# Remove files and folders from the workspace root folder. +rm -fr \ + .angular \ + .cache \ + .nx \ + .pnpm-store \ + coverage \ + playwright-report \ + reports + +# Remove nested files and folders. +# find . -name "build" -print0 | xargs -0 rm -fr # TODO: prevent OpenAPI build folders to be rm +find . 
\ + \( -name ".coverage" \ + -o -name ".gradle" \ + -o -name ".pytest_cache" \ + -o -name ".venv" \ + -o -name "bin" \ + -o -name "dist" \ + -o -name "node_modules" \) \ + -print0 | xargs -0 rm -rf \ No newline at end of file From cc2796620ca16bc33f08d6ecd959cd173e3865b6 Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Mon, 27 Jan 2025 14:48:39 -0800 Subject: [PATCH 08/10] chore: disable Java autobuild (SMR-4) (#2982) --- .vscode/settings.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 931547af9..7976f3ba2 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -4,7 +4,8 @@ "java.jdt.ls.java.home": "/usr/lib/jvm/java-17-openjdk-amd64", "java.format.enabled": false, "java.compile.nullAnalysis.mode": "disabled", - "java.configuration.updateBuildConfiguration": "disabled", + "java.configuration.updateBuildConfiguration": "automatic", + "java.autobuild.enabled": false, "css.validate": false, "less.validate": false, "scss.validate": false, From dfc00d2bc291865e6b9677d55465a4547a6b199d Mon Sep 17 00:00:00 2001 From: sagely1 <114952739+sagely1@users.noreply.github.com> Date: Mon, 27 Jan 2025 15:16:45 -0800 Subject: [PATCH 09/10] feat(agora): fix url capitalization (AG-1595) (#2979) --- libs/agora/styles/src/lib/components/_table.scss | 1 - 1 file changed, 1 deletion(-) diff --git a/libs/agora/styles/src/lib/components/_table.scss b/libs/agora/styles/src/lib/components/_table.scss index 87fc3b98d..adb3b1780 100644 --- a/libs/agora/styles/src/lib/components/_table.scss +++ b/libs/agora/styles/src/lib/components/_table.scss @@ -92,7 +92,6 @@ justify-content: center; text-align: center; cursor: pointer; - text-transform: capitalize; } tr { From 899e0917fd7f18d29eee8df8943bba5349dcab1f Mon Sep 17 00:00:00 2001 From: Thomas Schaffter Date: Mon, 27 Jan 2025 18:00:35 -0800 Subject: [PATCH 10/10] chore: update the Dev Container 24.01 Edition (#2975) --- .devcontainer/devcontainer.json | 102 ++++++++---------- .../actions/setup-dev-container/action.yml | 87 ++++++++------- apps/iatlas/api/poetry.lock | 18 ++-- apps/iatlas/api/pyproject.toml | 35 +++--- dev-env.sh | 25 ++++- libs/sandbox/py-lib/project.json | 9 +- package.json | 2 +- tools/check-devcontainer-version.js | 2 +- tools/workspace-install.sh | 15 +++ 9 files changed, 163 insertions(+), 132 deletions(-) create mode 100755 tools/workspace-install.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0d548718e..18af15fe0 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,6 +1,6 @@ { - "name": "Sage Dev Container", - "image": "ghcr.io/sage-bionetworks/sage-devcontainer:fab0893", + "name": "Sage Monorepo Dev Container", + "image": "ghcr.io/sage-bionetworks/sage-monorepo-devcontainer:sha-c7e2cdbc88fb8cc4815dafedd92b105e4b9544f2", "containerEnv": { "NX_BASE": "${localEnv:NX_BASE}", "NX_BRANCH": "${localEnv:NX_BRANCH}", @@ -14,58 +14,7 @@ "DOCKER_USERNAME": "${localEnv:DOCKER_USERNAME}", "DOCKER_PASSWORD": "${localEnv:DOCKER_PASSWORD}" }, - "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": { - "version": "27.0.3", - "moby": false - } - }, - "customizations": { - "codespaces": { - "openFiles": ["README.md"] - }, - "vscode": { - "extensions": [ - "alefragnani.Bookmarks", - "Angular.ng-template", - "dbaeumer.vscode-eslint", - "donjayamanne.python-environment-manager", - "dorzey.vscode-sqlfluff", - "eamodio.gitlens", - "emeraldwalk.RunOnSave", - "esbenp.prettier-vscode", - 
"exiasr.hadolint", - "formulahendry.auto-rename-tag", - "github.vscode-github-actions", - "GitHub.vscode-pull-request-github", - "Gruntfuggly.todo-tree", - "humao.rest-client", - "mhutchie.git-graph", - "mongodb.mongodb-vscode", - "ms-playwright.playwright", - "ms-python.black-formatter", - "ms-python.python", - "ms-toolsai.jupyter", - "mtxr.sqltools-driver-mysql", - "mtxr.sqltools-driver-pg", - "mtxr.sqltools", - "njpwerner.autodocstring", - "Orta.vscode-jest", - "pranaygp.vscode-css-peek", - "ritwickdey.LiveServer", - "shengchen.vscode-checkstyle", - "SonarSource.sonarlint-vscode", - "stkb.rewrap", - "stylelint.vscode-stylelint", - "vmware.vscode-boot-dev-pack", - "vscjava.vscode-gradle", - "vscjava.vscode-java-pack" - ], - "settings": { - "workbench.startupEditor": "readme" - } - } - }, + "features": {}, "forwardPorts": [ 2432, 3306, 3333, 4200, 4211, 5000, 5200, 5432, 5601, 8010, 8071, 8000, 8080, 8081, 8082, 8084, 8085, 8086, 8090, 8200, 8888, 8889, 9200, 9411, 27017 @@ -164,7 +113,48 @@ "onAutoForward": "silent" } }, - "remoteUser": "vscode", + "otherPortsAttributes": { + "onAutoForward": "silent" + }, "shutdownAction": "stopContainer", - "runArgs": ["--name", "sage_devcontainer"] + "runArgs": ["--name", "sage-monorepo-devcontainer"], + "customizations": { + "vscode": { + "extensions": [ + "alefragnani.Bookmarks", + "Angular.ng-template", + "dbaeumer.vscode-eslint", + "donjayamanne.python-environment-manager", + "eamodio.gitlens", + "emeraldwalk.RunOnSave", + "esbenp.prettier-vscode", + "exiasr.hadolint", + "formulahendry.auto-rename-tag", + "github.vscode-github-actions", + "GitHub.vscode-pull-request-github", + "Gruntfuggly.todo-tree", + "humao.rest-client", + "mhutchie.git-graph", + "mongodb.mongodb-vscode", + "ms-playwright.playwright", + "ms-python.black-formatter", + "ms-python.python", + "ms-toolsai.jupyter", + "mtxr.sqltools-driver-mysql", + "mtxr.sqltools-driver-pg", + "mtxr.sqltools", + "njpwerner.autodocstring", + "Orta.vscode-jest", + "pranaygp.vscode-css-peek", + "ritwickdey.LiveServer", + "shengchen.vscode-checkstyle", + "SonarSource.sonarlint-vscode", + "stkb.rewrap", + "stylelint.vscode-stylelint", + "vmware.vscode-boot-dev-pack", + "vscjava.vscode-gradle", + "vscjava.vscode-java-pack" + ] + } + } } diff --git a/.github/actions/setup-dev-container/action.yml b/.github/actions/setup-dev-container/action.yml index a840d6f50..d18b6981e 100644 --- a/.github/actions/setup-dev-container/action.yml +++ b/.github/actions/setup-dev-container/action.yml @@ -1,5 +1,10 @@ name: 'Set up the dev container' -description: 'Installs the dev container CLI, fetches caches (if exist), and starts the dev container' +description: 'Installs the dev container CLI, fetches caches (if exist), and starts the dev container.' +inputs: + devcontainer-user: + description: 'The dev container user.' 
+ required: false + default: 'ubuntu' runs: using: 'composite' steps: @@ -11,43 +16,45 @@ runs: # restore-keys: | # ${{ runner.os }}-pnpm-store- - - name: Set up Renv cache - uses: actions/cache@v3 - with: - path: '/tmp/.cache/R/renv/cache' - key: ${{ runner.os }}-renv-cache-${{ hashFiles('**/renv.lock') }} - restore-keys: | - ${{ runner.os }}-renv-cache- + # - name: Set up Renv cache + # uses: actions/cache@v3 + # with: + # path: '/tmp/.cache/R/renv/cache' + # key: ${{ runner.os }}-renv-cache-${{ hashFiles('**/renv.lock') }} + # restore-keys: | + # ${{ runner.os }}-renv-cache- - - name: Set up Poetry cache - uses: actions/cache@v3 - with: - path: '/tmp/.cache/pypoetry' - key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} + # - name: Set up Poetry cache + # uses: actions/cache@v3 + # with: + # path: '/tmp/.cache/pypoetry' + # key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} - - name: Set up venv cache - uses: actions/cache@v3 - with: - path: | - /tmp/.local/share/virtualenv - **/.venv - key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }} + # - name: Set up venv cache + # uses: actions/cache@v3 + # with: + # path: | + # /tmp/.local/share/virtualenv + # **/.venv + # key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }} - - name: Set up Gradle cache - uses: actions/cache@v3 - with: - path: | - /tmp/.gradle/caches - /tmp/.gradle/wrapper - key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} - restore-keys: | - ${{ runner.os }}-gradle- + # - name: Set up Gradle cache + # uses: actions/cache@v3 + # with: + # path: | + # /tmp/.gradle/caches + # /tmp/.gradle/wrapper + # key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + # restore-keys: | + # ${{ runner.os }}-gradle- - name: Install the Dev Container CLI shell: bash - run: npm install -g @devcontainers/cli@0.69.0 + run: npm install -g @devcontainers/cli@0.72.0 - name: Start the dev container + env: + DEVCONTAINER_USER: ${{ inputs.devcontainer-user }} shell: bash run: | mkdir -p \ @@ -60,21 +67,23 @@ runs: devcontainer up \ --mount type=bind,source=/tmp/.pnpm-store,target=/workspaces/sage-monorepo/.pnpm-store \ - --mount type=bind,source=/tmp/.cache/R/renv/cache,target=/home/vscode/.cache/R/renv/cache \ - --mount type=bind,source=/tmp/.cache/pypoetry,target=/home/vscode/.cache/pypoetry \ - --mount type=bind,source=/tmp/.local/share/virtualenv,target=/home/vscode/.local/share/virtualenv \ - --mount type=bind,source=/tmp/.gradle/caches,target=/home/vscode/.gradle/caches \ - --mount type=bind,source=/tmp/.gradle/wrapper,target=/home/vscode/.gradle/wrapper \ + --mount type=bind,source=/tmp/.cache/R/renv/cache,target=/home/${DEVCONTAINER_USER}/.cache/R/renv/cache \ + --mount type=bind,source=/tmp/.cache/pypoetry,target=/home/${DEVCONTAINER_USER}/.cache/pypoetry \ + --mount type=bind,source=/tmp/.local/share/virtualenv,target=/home/${DEVCONTAINER_USER}/.local/share/virtualenv \ + --mount type=bind,source=/tmp/.gradle/caches,target=/home/${DEVCONTAINER_USER}/.gradle/caches \ + --mount type=bind,source=/tmp/.gradle/wrapper,target=/home/${DEVCONTAINER_USER}/.gradle/wrapper \ --workspace-folder ../sage-monorepo - name: Prepare the workspace + env: + DEVCONTAINER_USER: ${{ inputs.devcontainer-user }} shell: bash run: | devcontainer exec --workspace-folder ../sage-monorepo bash -c " - sudo chown -R vscode:vscode \ + sudo chown -R ${DEVCONTAINER_USER}:${DEVCONTAINER_USER} \ /workspaces/sage-monorepo \ - /home/vscode/.cache \ - 
/home/vscode/.local \ - /home/vscode/.gradle \ + /home/${DEVCONTAINER_USER}/.cache \ + /home/${DEVCONTAINER_USER}/.local \ + /home/${DEVCONTAINER_USER}/.gradle \ && . ./dev-env.sh \ && workspace-install-affected" diff --git a/apps/iatlas/api/poetry.lock b/apps/iatlas/api/poetry.lock index c8ec28e10..4a2fdd04e 100644 --- a/apps/iatlas/api/poetry.lock +++ b/apps/iatlas/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "ariadne" @@ -46,19 +46,19 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -776,12 +776,12 @@ files = [ [[package]] name = "uwsgi" -version = "2.0.19.1" +version = "2.0.28" description = "The uWSGI server" optional = false python-versions = "*" files = [ - {file = "uWSGI-2.0.19.1.tar.gz", hash = "sha256:faa85e053c0b1be4d5585b0858d3a511d2cd10201802e8676060fd0a109e5869"}, + {file = "uwsgi-2.0.28.tar.gz", hash = "sha256:79ca1891ef2df14508ab0471ee8c0eb94bd2d51d03f32f90c4bbe557ab1e99d0"}, ] [[package]] @@ -812,4 +812,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "3.8.20" -content-hash = "cc20f624b0a572b4d6e377386a4df25cd8b8034a249ebe2e1d9abf01ee651611" +content-hash = "5d916c47ddf016772ca11a37795ce9164c2d7f600de1d62750ed3cd90cea45c5" diff --git a/apps/iatlas/api/pyproject.toml b/apps/iatlas/api/pyproject.toml index 08388e59d..74b6faf5d 100644 --- a/apps/iatlas/api/pyproject.toml +++ b/apps/iatlas/api/pyproject.toml @@ -9,21 +9,24 @@ authors = [ ] readme = "README.md" -[tool.poetry.dependencies] -python = "3.8.20" -ariadne = "0.13.0" -click = "7.1.2" -flask = "1.1.2" -flask-sqlalchemy = "2.4.3" -graphql-core = "3.1.0" -itsdangerous = "1.1.0" -jinja2 = "2.11.2" -markupsafe = "1.1.1" -psycopg2-binary = "2.8.5" -sqlalchemy = "1.3.17" -starlette = "0.13.4" -typing-extensions = "3.7.4.2" -werkzeug = "1.0.1" + 
[[tool.poetry.packages]] + include = "api" + + [tool.poetry.dependencies] + python = "3.8.20" + ariadne = "0.13.0" + click = "7.1.2" + flask = "1.1.2" + flask-sqlalchemy = "2.4.3" + graphql-core = "3.1.0" + itsdangerous = "1.1.0" + jinja2 = "2.11.2" + markupsafe = "1.1.1" + psycopg2-binary = "2.8.5" + sqlalchemy = "1.3.17" + starlette = "0.13.4" + typing-extensions = "3.7.4.2" + werkzeug = "1.0.1" [tool.poetry.group.dev.dependencies] autopep8 = "1.5.4" @@ -36,7 +39,7 @@ pytest-xdist = "2.1.0" snakeviz = "2.1.0" [tool.poetry.group.prod.dependencies] -uWSGI = "2.0.19.1" +uWSGI = "2.0.28" [tool.pytest.ini_options] log_cli = true diff --git a/dev-env.sh b/dev-env.sh index 6e03e9a1b..486c8623b 100644 --- a/dev-env.sh +++ b/dev-env.sh @@ -42,8 +42,9 @@ function workspace-install-affected { workspace-install-nodejs-dependencies workspace-install-python-dependencies nx affected --target=create-config - nx affected --target=prepare --exclude '!tag:language:java' --parallel=1 - nx affected --target=prepare --exclude 'tag:language:java' + nx affected --target=prepare --projects=tag:language:java --parallel=1 + nx affected --target=prepare --projects=tag:language:python --parallel=1 + nx affected --target=prepare --projects=tag:language:r } # Setup Python virtualenvs @@ -176,6 +177,22 @@ function workspace-initialize-env { export COREPACK_ENABLE_DOWNLOAD_PROMPT="0" } -function workspace-nuke-venv { - find . -name ".venv" -print0 | xargs -0 rm -fr +function workspace-nuke { + rm -fr \ + .angular \ + .cache \ + .nx \ + .pnpm-store \ + coverage \ + playwright-report \ + reports + + # find . -name "build" -print0 | xargs -0 rm -fr # but not OA build folders + find . -name ".coverage" -print0 | xargs -0 rm -fr + find . -name ".gradle" -print0 | xargs -0 rm -fr + find . -name ".pytest_cache" -print0 | xargs -0 rm -fr + find . -name ".venv" -print0 | xargs -0 rm -fr + find . -name "bin" -print0 | xargs -0 rm -fr + find . -name "dist" -print0 | xargs -0 rm -fr + find . -name "node_modules" -print0 | xargs -0 rm -fr } \ No newline at end of file diff --git a/libs/sandbox/py-lib/project.json b/libs/sandbox/py-lib/project.json index b6296c219..8fb504df8 100644 --- a/libs/sandbox/py-lib/project.json +++ b/libs/sandbox/py-lib/project.json @@ -22,13 +22,10 @@ } }, "prepare": { - "executor": "@nxlv/python:install", + "executor": "nx:run-commands", "options": { - "silent": false, - "args": "", - "cacheDir": ".cache/pypoetry", - "verbose": false, - "debug": false + "command": "./install.sh", + "cwd": "{projectRoot}" } }, "update": { diff --git a/package.json b/package.json index f2092b931..d5174c3ed 100644 --- a/package.json +++ b/package.json @@ -196,5 +196,5 @@ "vite": "5.2.11", "vitest": "1.6.0" }, - "packageManager": "pnpm@9.9.0" + "packageManager": "pnpm@9.15.4" } diff --git a/tools/check-devcontainer-version.js b/tools/check-devcontainer-version.js index bef896a16..4c5305555 100644 --- a/tools/check-devcontainer-version.js +++ b/tools/check-devcontainer-version.js @@ -23,7 +23,7 @@ if (currentDevcontainerVersion === undefined) { process.exit(0); } -if (expectedDevcontainerVersion !== currentDevcontainerVersion) { +if (expectedDevcontainerVersion !== `sha-${currentDevcontainerVersion}`) { console.info('🐋 The dev container has changed. 
Please rebuild it.'); // console.debug(`Expected dev container version: ${expectedDevcontainerVersion}`); // console.debug(`Current dev container version: ${currentDevcontainerVersion}`); diff --git a/tools/workspace-install.sh b/tools/workspace-install.sh new file mode 100755 index 000000000..7aa3b8425 --- /dev/null +++ b/tools/workspace-install.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +# Install Node.js dependencies +pnpm install --frozen-lockfile + +# Install workspace Python dependencies +# poetry env use $(pyenv which python) +# poetry install --with dev + +# Prepare projects +pnpm dlx nx run-many --target=create-config +pnpm dlx nx run-many --target=prepare --projects=tag:language:java --parallel=1 +# nx run-many --target=prepare --projects=tag:language:python --parallel=1 +pnpm dlx nx run-many --target=prepare --projects=tag:language:r +